vulkan.hpp 4.4 MB

430914310143111431214313143141431514316143171431814319143201432114322143231432414325143261432714328143291433014331143321433314334143351433614337143381433914340143411434214343143441434514346143471434814349143501435114352143531435414355143561435714358143591436014361143621436314364143651436614367143681436914370143711437214373143741437514376143771437814379143801438114382143831438414385143861438714388143891439014391143921439314394143951439614397143981439914400144011440214403144041440514406144071440814409144101441114412144131441414415144161441714418144191442014421144221442314424144251442614427144281442914430144311443214433144341443514436144371443814439144401444114442144431444414445144461444714448144491445014451144521445314454144551445614457144581445914460144611446214463144641446514466144671446814469144701447114472144731447414475144761447714478144791448014481144821448314484144851448614487144881448914490144911449214493144941449514496144971449814499145001450114502145031450414505145061450714508145091451014511145121451314514145151451614517145181451914520145211452214523145241452514526145271452814529145301453114532145331453414535145361453714538145391454014541145421454314544145451454614547145481454914550145511455214553145541455514556145571455814559145601456114562145631456414565145661456714568145691457014571145721457314574145751457614577145781457914580145811458214583145841458514586145871458814589145901459114592145931459414595145961459714598145991460014601146021460314604146051460614607146081460914610146111461214613146141461514616146171461814619146201462114622146231462414625146261462714628146291463014631146321463314634146351463614637146381463914640146411464214643146441464514646146471464814649146501465114652146531465414655146561465714658146591466014661146621466314664146651466614667146681466914670146711467214673146741467514676146771467814679146801468114682146831468414685146861468714688146891469014691146921469314694146951469614697146981469914700147011470214703147041470514706147071470814709147101471114712147131471414715147161471714718147191472014721147221472314724147251472614727147281472914730147311473214733147341473514736147371473814739147401474114742147431474414745147461474714748147491475014751147521475314754147551475614757147581475914760147611476214763147641476514766147671476814769147701477114772147731477414775147761477714778147791478014781147821478314784147851478614787147881478914790147911479214793147941479514796147971479814799148001480114802148031480414805148061480714808148091481014811148121481314814148151481614817148181481914820148211482214823148241482514826148271482814829148301483114832148331483414835148361483714838148391484014841148421484314844148451484614847148481484914850148511485214853148541485514856148571485814859148601486114862148631486414865148661486714868148691487014871148721487314874148751487614877148781487914880148811488214883148841488514886148871488814889148901489114892148931489414895148961489714898148991490014901149021490314904149051490614907149081490914910149111491214913149141491514916149171491814919149201492114922149231492414925149261492714928149291493014931149321493314934149351493614937149381493914940149411494214943149441494514946149471494814949149501495114952149531495414955149561495714958149591496014961149621496314964149651496614967149681496914970149711497214973149741497514976149771497814979149801498114982149831498414985149861498714988149891499014991149921499314994149951499614997149981499915000150011500215003150041500515006150071500815009150101501115012150131501415015150161501715018150191
502015021150221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211542215423154241542515426154271542815429154301543115432154331543415435154361543715438154391544015441154421544315444154451544615447154481544915450154511545215453154541545515456154571545815459154601546115462154631546415465154661546715468154691547015471154721547315474154751547615477154781547915480154811548215483154841548515486154871548815489154901549115492154931549415495154961549715498154991550015501155021550315504155051550615507155081550915510155111551215513155141551515516155171551815519155201552115522155231552415525155261552715528155291553015531155321553315534155351553615537155381553915540155411554215543155441554515546155471554815549155501555115552155531555415555155561555715558155591556015561155621556315564155651556615567155681556915570155711557215573155741557515576155771557815579155801558115582155831558415585155861558715588155891559015591155921559315594155951559615597155981559915600156011560215603156041560515606156071560815609156101561115612156131561415615156161561715618156191562015621156221562315624156251562615627156281562915630156311563215633156341563515636156371563815639156401564115642156431564415645156461564715648156491565015651156521565315654156551565615657156581565915660156611566215663156641566515666156671566815669156701567115672156731567415675156761567715678156791568015681156821568315684156851568615687156881568915690156911569215693156941569515696156971569815699157001570115702157031570415705157061570715708157091571015711157121571315714157151571615717157181571915720157211572215723157241572515726157271572815729157301
573115732157331573415735157361573715738157391574015741157421574315744157451574615747157481574915750157511575215753157541575515756157571575815759157601576115762157631576415765157661576715768157691577015771157721577315774157751577615777157781577915780157811578215783157841578515786157871578815789157901579115792157931579415795157961579715798157991580015801158021580315804158051580615807158081580915810158111581215813158141581515816158171581815819158201582115822158231582415825158261582715828158291583015831158321583315834158351583615837158381583915840158411584215843158441584515846158471584815849158501585115852158531585415855158561585715858158591586015861158621586315864158651586615867158681586915870158711587215873158741587515876158771587815879158801588115882158831588415885158861588715888158891589015891158921589315894158951589615897158981589915900159011590215903159041590515906159071590815909159101591115912159131591415915159161591715918159191592015921159221592315924159251592615927159281592915930159311593215933159341593515936159371593815939159401594115942159431594415945159461594715948159491595015951159521595315954159551595615957159581595915960159611596215963159641596515966159671596815969159701597115972159731597415975159761597715978159791598015981159821598315984159851598615987159881598915990159911599215993159941599515996159971599815999160001600116002160031600416005160061600716008160091601016011160121601316014160151601616017160181601916020160211602216023160241602516026160271602816029160301603116032160331603416035160361603716038160391604016041160421604316044160451604616047160481604916050160511605216053160541605516056160571605816059160601606116062160631606416065160661606716068160691607016071160721607316074160751607616077160781607916080160811608216083160841608516086160871608816089160901609116092160931609416095160961609716098160991610016101161021610316104161051610616107161081610916110161111611216113161141611516116161171611816119161201612116122161231612416125161261612716128161291613016131161321613316134161351613616137161381613916140161411614216143161441614516146161471614816149161501615116152161531615416155161561615716158161591616016161161621616316164161651616616167161681616916170161711617216173161741617516176161771617816179161801618116182161831618416185161861618716188161891619016191161921619316194161951619616197161981619916200162011620216203162041620516206162071620816209162101621116212162131621416215162161621716218162191622016221162221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411
644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211662216623166241662516626166271662816629166301663116632166331663416635166361663716638166391664016641166421664316644166451664616647166481664916650166511665216653166541665516656166571665816659166601666116662166631666416665166661666716668166691667016671166721667316674166751667616677166781667916680166811668216683166841668516686166871668816689166901669116692166931669416695166961669716698166991670016701167021670316704167051670616707167081670916710167111671216713167141671516716167171671816719167201672116722167231672416725167261672716728167291673016731167321673316734167351673616737167381673916740167411674216743167441674516746167471674816749167501675116752167531675416755167561675716758167591676016761167621676316764167651676616767167681676916770167711677216773167741677516776167771677816779167801678116782167831678416785167861678716788167891679016791167921679316794167951679616797167981679916800168011680216803168041680516806168071680816809168101681116812168131681416815168161681716818168191682016821168221682316824168251682616827168281682916830168311683216833168341683516836168371683816839168401684116842168431684416845168461684716848168491685016851168521685316854168551685616857168581685916860168611686216863168641686516866168671686816869168701687116872168731687416875168761687716878168791688016881168821688316884168851688616887168881688916890168911689216893168941689516896168971689816899169001690116902169031690416905169061690716908169091691016911169121691316914169151691616917169181691916920169211692216923169241692516926169271692816929169301693116932169331693416935169361693716938169391694016941169421694316944169451694616947169481694916950169511695216953169541695516956169571695816959169601696116962169631696416965169661696716968169691697016971169721697316974169751697616977169781697916980169811698216983169841698516986169871698816989169901699116992169931699416995169961699716998169991700017001170021700317004170051700617007170081700917010170111701217013170141701517016170171701817019170201702117022170231702417025170261702717028170291703017031170321703317034170351703617037170381703917040170411704217043170441704517046170471704817049170501705117052170531705417055170561705717058170591706017061170621706317064170651706617067170681706917070170711707217073170741707517076170771707817079170801708117082170831708417085170861708717088170891709017091170921709317094170951709617097170981709917100171011710217103171041710517106171071710817109171101711117112171131711417115171161711717118171191712017121171221712317124171251712617127171281712917130171311713217133171341713517136171371713817139171401714117142171431714417145171461714717148171491715017151171521
715317154171551715617157171581715917160171611716217163171641716517166171671716817169171701717117172171731717417175171761717717178171791718017181171821718317184171851718617187171881718917190171911719217193171941719517196171971719817199172001720117202172031720417205172061720717208172091721017211172121721317214172151721617217172181721917220172211722217223172241722517226172271722817229172301723117232172331723417235172361723717238172391724017241172421724317244172451724617247172481724917250172511725217253172541725517256172571725817259172601726117262172631726417265172661726717268172691727017271172721727317274172751727617277172781727917280172811728217283172841728517286172871728817289172901729117292172931729417295172961729717298172991730017301173021730317304173051730617307173081730917310173111731217313173141731517316173171731817319173201732117322173231732417325173261732717328173291733017331173321733317334173351733617337173381733917340173411734217343173441734517346173471734817349173501735117352173531735417355173561735717358173591736017361173621736317364173651736617367173681736917370173711737217373173741737517376173771737817379173801738117382173831738417385173861738717388173891739017391173921739317394173951739617397173981739917400174011740217403174041740517406174071740817409174101741117412174131741417415174161741717418174191742017421174221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211782217823178241782517826178271782817829178301783117832178331783417835178361783717838178391784017841178421784317844178451784617847178481784917850178511785217853178541785517856178571785817859178601786117862178631
786417865178661786717868178691787017871178721787317874178751787617877178781787917880178811788217883178841788517886178871788817889178901789117892178931789417895178961789717898178991790017901179021790317904179051790617907179081790917910179111791217913179141791517916179171791817919179201792117922179231792417925179261792717928179291793017931179321793317934179351793617937179381793917940179411794217943179441794517946179471794817949179501795117952179531795417955179561795717958179591796017961179621796317964179651796617967179681796917970179711797217973179741797517976179771797817979179801798117982179831798417985179861798717988179891799017991179921799317994179951799617997179981799918000180011800218003180041800518006180071800818009180101801118012180131801418015180161801718018180191802018021180221802318024180251802618027180281802918030180311803218033180341803518036180371803818039180401804118042180431804418045180461804718048180491805018051180521805318054180551805618057180581805918060180611806218063180641806518066180671806818069180701807118072180731807418075180761807718078180791808018081180821808318084180851808618087180881808918090180911809218093180941809518096180971809818099181001810118102181031810418105181061810718108181091811018111181121811318114181151811618117181181811918120181211812218123181241812518126181271812818129181301813118132181331813418135181361813718138181391814018141181421814318144181451814618147181481814918150181511815218153181541815518156181571815818159181601816118162181631816418165181661816718168181691817018171181721817318174181751817618177181781817918180181811818218183181841818518186181871818818189181901819118192181931819418195181961819718198181991820018201182021820318204182051820618207182081820918210182111821218213182141821518216182171821818219182201822118222182231822418225182261822718228182291823018231182321823318234182351823618237182381823918240182411824218243182441824518246182471824818249182501825118252182531825418255182561825718258182591826018261182621826318264182651826618267182681826918270182711827218273182741827518276182771827818279182801828118282182831828418285182861828718288182891829018291182921829318294182951829618297182981829918300183011830218303183041830518306183071830818309183101831118312183131831418315183161831718318183191832018321183221832318324183251832618327183281832918330183311833218333183341833518336183371833818339183401834118342183431834418345183461834718348183491835018351183521835318354183551835618357183581835918360183611836218363183641836518366183671836818369183701837118372183731837418375183761837718378183791838018381183821838318384183851838618387183881838918390183911839218393183941839518396183971839818399184001840118402184031840418405184061840718408184091841018411184121841318414184151841618417184181841918420184211842218423184241842518426184271842818429184301843118432184331843418435184361843718438184391844018441184421844318444184451844618447184481844918450184511845218453184541845518456184571845818459184601846118462184631846418465184661846718468184691847018471184721847318474184751847618477184781847918480184811848218483184841848518486184871848818489184901849118492184931849418495184961849718498184991850018501185021850318504185051850618507185081850918510185111851218513185141851518516185171851818519185201852118522185231852418525185261852718528185291853018531185321853318534185351853618537185381853918540185411854218543185441854518546185471854818549185501855118552185531855418555185561855718558185591856018561185621856318564185651856618567185681856918570185711857218573185741
857518576185771857818579185801858118582185831858418585185861858718588185891859018591185921859318594185951859618597185981859918600186011860218603186041860518606186071860818609186101861118612186131861418615186161861718618186191862018621186221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211902219023190241902519026190271902819029190301903119032190331903419035190361903719038190391904019041190421904319044190451904619047190481904919050190511905219053190541905519056190571905819059190601906119062190631906419065190661906719068190691907019071190721907319074190751907619077190781907919080190811908219083190841908519086190871908819089190901909119092190931909419095190961909719098190991910019101191021910319104191051910619107191081910919110191111911219113191141911519116191171911819119191201912119122191231912419125191261912719128191291913019131191321913319134191351913619137191381913919140191411914219143191441914519146191471914819149191501915119152191531915419155191561915719158191591916019161191621916319164191651916619167191681916919170191711917219173191741917519176191771917819179191801918119182191831918419185191861918719188191891919019191191921919319194191951919619197191981919919200192011920219203192041920519206192071920819209192101921119212192131921419215192161921719218192191922019221192221922319224192251922619227192281922919230192311923219233192341923519236192371923819239192401924119242192431924419245192461924719248192491925019251192521925319254192551925619257192581925919260192611926219263192641926519266192671926819269192701927119272192731927419275192761927719278192791928019281192821928319284192851
928619287192881928919290192911929219293192941929519296192971929819299193001930119302193031930419305193061930719308193091931019311193121931319314193151931619317193181931919320193211932219323193241932519326193271932819329193301933119332193331933419335193361933719338193391934019341193421934319344193451934619347193481934919350193511935219353193541935519356193571935819359193601936119362193631936419365193661936719368193691937019371193721937319374193751937619377193781937919380193811938219383193841938519386193871938819389193901939119392193931939419395193961939719398193991940019401194021940319404194051940619407194081940919410194111941219413194141941519416194171941819419194201942119422194231942419425194261942719428194291943019431194321943319434194351943619437194381943919440194411944219443194441944519446194471944819449194501945119452194531945419455194561945719458194591946019461194621946319464194651946619467194681946919470194711947219473194741947519476194771947819479194801948119482194831948419485194861948719488194891949019491194921949319494194951949619497194981949919500195011950219503195041950519506195071950819509195101951119512195131951419515195161951719518195191952019521195221952319524195251952619527195281952919530195311953219533195341953519536195371953819539195401954119542195431954419545195461954719548195491955019551195521955319554195551955619557195581955919560195611956219563195641956519566195671956819569195701957119572195731957419575195761957719578195791958019581195821958319584195851958619587195881958919590195911959219593195941959519596195971959819599196001960119602196031960419605196061960719608196091961019611196121961319614196151961619617196181961919620196211962219623196241962519626196271962819629196301963119632196331963419635196361963719638196391964019641196421964319644196451964619647196481964919650196511965219653196541965519656196571965819659196601966119662196631966419665196661966719668196691967019671196721967319674196751967619677196781967919680196811968219683196841968519686196871968819689196901969119692196931969419695196961969719698196991970019701197021970319704197051970619707197081970919710197111971219713197141971519716197171971819719197201972119722197231972419725197261972719728197291973019731197321973319734197351973619737197381973919740197411974219743197441974519746197471974819749197501975119752197531975419755197561975719758197591976019761197621976319764197651976619767197681976919770197711977219773197741977519776197771977819779197801978119782197831978419785197861978719788197891979019791197921979319794197951979619797197981979919800198011980219803198041980519806198071980819809198101981119812198131981419815198161981719818198191982019821198221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961
999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212022220223202242022520226202272022820229202302023120232202332023420235202362023720238202392024020241202422024320244202452024620247202482024920250202512025220253202542025520256202572025820259202602026120262202632026420265202662026720268202692027020271202722027320274202752027620277202782027920280202812028220283202842028520286202872028820289202902029120292202932029420295202962029720298202992030020301203022030320304203052030620307203082030920310203112031220313203142031520316203172031820319203202032120322203232032420325203262032720328203292033020331203322033320334203352033620337203382033920340203412034220343203442034520346203472034820349203502035120352203532035420355203562035720358203592036020361203622036320364203652036620367203682036920370203712037220373203742037520376203772037820379203802038120382203832038420385203862038720388203892039020391203922039320394203952039620397203982039920400204012040220403204042040520406204072040820409204102041120412204132041420415204162041720418204192042020421204222042320424204252042620427204282042920430204312043220433204342043520436204372043820439204402044120442204432044420445204462044720448204492045020451204522045320454204552045620457204582045920460204612046220463204642046520466204672046820469204702047120472204732047420475204762047720478204792048020481204822048320484204852048620487204882048920490204912049220493204942049520496204972049820499205002050120502205032050420505205062050720508205092051020511205122051320514205152051620517205182051920520205212052220523205242052520526205272052820529205302053120532205332053420535205362053720538205392054020541205422054320544205452054620547205482054920550205512055220553205542055520556205572055820559205602056120562205632056420565205662056720568205692057020571205722057320574205752057620577205782057920580205812058220583205842058520586205872058820589205902059120592205932059420595205962059720598205992060020601206022060320604206052060620607206082060920610206112061220613206142061520616206172061820619206202062120622206232062420625206262062720628206292063020631206322063320634206352063620637206382063920640206412064220643206442064520646206472064820649206502065120652206532065420655206562065720658206592066020661206622066320664206652066620667206682066920670206712067220673206742067520676206772067820679206802068120682206832068420685206862068720688206892069020691206922069320694206952069620697206982069920700207012070220703207042070520706207072
070820709207102071120712207132071420715207162071720718207192072020721207222072320724207252072620727207282072920730207312073220733207342073520736207372073820739207402074120742207432074420745207462074720748207492075020751207522075320754207552075620757207582075920760207612076220763207642076520766207672076820769207702077120772207732077420775207762077720778207792078020781207822078320784207852078620787207882078920790207912079220793207942079520796207972079820799208002080120802208032080420805208062080720808208092081020811208122081320814208152081620817208182081920820208212082220823208242082520826208272082820829208302083120832208332083420835208362083720838208392084020841208422084320844208452084620847208482084920850208512085220853208542085520856208572085820859208602086120862208632086420865208662086720868208692087020871208722087320874208752087620877208782087920880208812088220883208842088520886208872088820889208902089120892208932089420895208962089720898208992090020901209022090320904209052090620907209082090920910209112091220913209142091520916209172091820919209202092120922209232092420925209262092720928209292093020931209322093320934209352093620937209382093920940209412094220943209442094520946209472094820949209502095120952209532095420955209562095720958209592096020961209622096320964209652096620967209682096920970209712097220973209742097520976209772097820979209802098120982209832098420985209862098720988209892099020991209922099320994209952099620997209982099921000210012100221003210042100521006210072100821009210102101121012210132101421015210162101721018210192102021021210222102321024210252102621027210282102921030210312103221033210342103521036210372103821039210402104121042210432104421045210462104721048210492105021051210522105321054210552105621057210582105921060210612106221063210642106521066210672106821069210702107121072210732107421075210762107721078210792108021081210822108321084210852108621087210882108921090210912109221093210942109521096210972109821099211002110121102211032110421105211062110721108211092111021111211122111321114211152111621117211182111921120211212112221123211242112521126211272112821129211302113121132211332113421135211362113721138211392114021141211422114321144211452114621147211482114921150211512115221153211542115521156211572115821159211602116121162211632116421165211662116721168211692117021171211722117321174211752117621177211782117921180211812118221183211842118521186211872118821189211902119121192211932119421195211962119721198211992120021201212022120321204212052120621207212082120921210212112121221213212142121521216212172121821219212202122121222212232122421225212262122721228212292123021231212322123321234212352123621237212382123921240212412124221243212442124521246212472124821249212502125121252212532125421255212562125721258212592126021261212622126321264212652126621267212682126921270212712127221273212742127521276212772127821279212802128121282212832128421285212862128721288212892129021291212922129321294212952129621297212982129921300213012130221303213042130521306213072130821309213102131121312213132131421315213162131721318213192132021321213222132321324213252132621327213282132921330213312133221333213342133521336213372133821339213402134121342213432134421345213462134721348213492135021351213522135321354213552135621357213582135921360213612136221363213642136521366213672136821369213702137121372213732137421375213762137721378213792138021381213822138321384213852138621387213882138921390213912139221393213942139521396213972139821399214002140121402214032140421405214062140721408214092141021411214122141321414214152141621417214182
141921420214212142221423214242142521426214272142821429214302143121432214332143421435214362143721438214392144021441214422144321444214452144621447214482144921450214512145221453214542145521456214572145821459214602146121462214632146421465214662146721468214692147021471214722147321474214752147621477214782147921480214812148221483214842148521486214872148821489214902149121492214932149421495214962149721498214992150021501215022150321504215052150621507215082150921510215112151221513215142151521516215172151821519215202152121522215232152421525215262152721528215292153021531215322153321534215352153621537215382153921540215412154221543215442154521546215472154821549215502155121552215532155421555215562155721558215592156021561215622156321564215652156621567215682156921570215712157221573215742157521576215772157821579215802158121582215832158421585215862158721588215892159021591215922159321594215952159621597215982159921600216012160221603216042160521606216072160821609216102161121612216132161421615216162161721618216192162021621216222162321624216252162621627216282162921630216312163221633216342163521636216372163821639216402164121642216432164421645216462164721648216492165021651216522165321654216552165621657216582165921660216612166221663216642166521666216672166821669216702167121672216732167421675216762167721678216792168021681216822168321684216852168621687216882168921690216912169221693216942169521696216972169821699217002170121702217032170421705217062170721708217092171021711217122171321714217152171621717217182171921720217212172221723217242172521726217272172821729217302173121732217332173421735217362173721738217392174021741217422174321744217452174621747217482174921750217512175221753217542175521756217572175821759217602176121762217632176421765217662176721768217692177021771217722177321774217752177621777217782177921780217812178221783217842178521786217872178821789217902179121792217932179421795217962179721798217992180021801218022180321804218052180621807218082180921810218112181221813218142181521816218172181821819218202182121822218232182421825218262182721828218292183021831218322183321834218352183621837218382183921840218412184221843218442184521846218472184821849218502185121852218532185421855218562185721858218592186021861218622186321864218652186621867218682186921870218712187221873218742187521876218772187821879218802188121882218832188421885218862188721888218892189021891218922189321894218952189621897218982189921900219012190221903219042190521906219072190821909219102191121912219132191421915219162191721918219192192021921219222192321924219252192621927219282192921930219312193221933219342193521936219372193821939219402194121942219432194421945219462194721948219492195021951219522195321954219552195621957219582195921960219612196221963219642196521966219672196821969219702197121972219732197421975219762197721978219792198021981219822198321984219852198621987219882198921990219912199221993219942199521996219972199821999220002200122002220032200422005220062200722008220092201022011220122201322014220152201622017220182201922020220212202222023220242202522026220272202822029220302203122032220332203422035220362203722038220392204022041220422204322044220452204622047220482204922050220512205222053220542205522056220572205822059220602206122062220632206422065220662206722068220692207022071220722207322074220752207622077220782207922080220812208222083220842208522086220872208822089220902209122092220932209422095220962209722098220992210022101221022210322104221052210622107221082210922110221112211222113221142211522116221172211822119221202212122122221232212422125221262212722128221292
213022131221322213322134221352213622137221382213922140221412214222143221442214522146221472214822149221502215122152221532215422155221562215722158221592216022161221622216322164221652216622167221682216922170221712217222173221742217522176221772217822179221802218122182221832218422185221862218722188221892219022191221922219322194221952219622197221982219922200222012220222203222042220522206222072220822209222102221122212222132221422215222162221722218222192222022221222222222322224222252222622227222282222922230222312223222233222342223522236222372223822239222402224122242222432224422245222462224722248222492225022251222522225322254222552225622257222582225922260222612226222263222642226522266222672226822269222702227122272222732227422275222762227722278222792228022281222822228322284222852228622287222882228922290222912229222293222942229522296222972229822299223002230122302223032230422305223062230722308223092231022311223122231322314223152231622317223182231922320223212232222323223242232522326223272232822329223302233122332223332233422335223362233722338223392234022341223422234322344223452234622347223482234922350223512235222353223542235522356223572235822359223602236122362223632236422365223662236722368223692237022371223722237322374223752237622377223782237922380223812238222383223842238522386223872238822389223902239122392223932239422395223962239722398223992240022401224022240322404224052240622407224082240922410224112241222413224142241522416224172241822419224202242122422224232242422425224262242722428224292243022431224322243322434224352243622437224382243922440224412244222443224442244522446224472244822449224502245122452224532245422455224562245722458224592246022461224622246322464224652246622467224682246922470224712247222473224742247522476224772247822479224802248122482224832248422485224862248722488224892249022491224922249322494224952249622497224982249922500225012250222503225042250522506225072250822509225102251122512225132251422515225162251722518225192252022521225222252322524225252252622527225282252922530225312253222533225342253522536225372253822539225402254122542225432254422545225462254722548225492255022551225522255322554225552255622557225582255922560225612256222563225642256522566225672256822569225702257122572225732257422575225762257722578225792258022581225822258322584225852258622587225882258922590225912259222593225942259522596225972259822599226002260122602226032260422605226062260722608226092261022611226122261322614226152261622617226182261922620226212262222623226242262522626226272262822629226302263122632226332263422635226362263722638226392264022641226422264322644226452264622647226482264922650226512265222653226542265522656226572265822659226602266122662226632266422665226662266722668226692267022671226722267322674226752267622677226782267922680226812268222683226842268522686226872268822689226902269122692226932269422695226962269722698226992270022701227022270322704227052270622707227082270922710227112271222713227142271522716227172271822719227202272122722227232272422725227262272722728227292273022731227322273322734227352273622737227382273922740227412274222743227442274522746227472274822749227502275122752227532275422755227562275722758227592276022761227622276322764227652276622767227682276922770227712277222773227742277522776227772277822779227802278122782227832278422785227862278722788227892279022791227922279322794227952279622797227982279922800228012280222803228042280522806228072280822809228102281122812228132281422815228162281722818228192282022821228222282322824228252282622827228282282922830228312283222833228342283522836228372283822839228402
284122842228432284422845228462284722848228492285022851228522285322854228552285622857228582285922860228612286222863228642286522866228672286822869228702287122872228732287422875228762287722878228792288022881228822288322884228852288622887228882288922890228912289222893228942289522896228972289822899229002290122902229032290422905229062290722908229092291022911229122291322914229152291622917229182291922920229212292222923229242292522926229272292822929229302293122932229332293422935229362293722938229392294022941229422294322944229452294622947229482294922950229512295222953229542295522956229572295822959229602296122962229632296422965229662296722968229692297022971229722297322974229752297622977229782297922980229812298222983229842298522986229872298822989229902299122992229932299422995229962299722998229992300023001230022300323004230052300623007230082300923010230112301223013230142301523016230172301823019230202302123022230232302423025230262302723028230292303023031230322303323034230352303623037230382303923040230412304223043230442304523046230472304823049230502305123052230532305423055230562305723058230592306023061230622306323064230652306623067230682306923070230712307223073230742307523076230772307823079230802308123082230832308423085230862308723088230892309023091230922309323094230952309623097230982309923100231012310223103231042310523106231072310823109231102311123112231132311423115231162311723118231192312023121231222312323124231252312623127231282312923130231312313223133231342313523136231372313823139231402314123142231432314423145231462314723148231492315023151231522315323154231552315623157231582315923160231612316223163231642316523166231672316823169231702317123172231732317423175231762317723178231792318023181231822318323184231852318623187231882318923190231912319223193231942319523196231972319823199232002320123202232032320423205232062320723208232092321023211232122321323214232152321623217232182321923220232212322223223232242322523226232272322823229232302323123232232332323423235232362323723238232392324023241232422324323244232452324623247232482324923250232512325223253232542325523256232572325823259232602326123262232632326423265232662326723268232692327023271232722327323274232752327623277232782327923280232812328223283232842328523286232872328823289232902329123292232932329423295232962329723298232992330023301233022330323304233052330623307233082330923310233112331223313233142331523316233172331823319233202332123322233232332423325233262332723328233292333023331233322333323334233352333623337233382333923340233412334223343233442334523346233472334823349233502335123352233532335423355233562335723358233592336023361233622336323364233652336623367233682336923370233712337223373233742337523376233772337823379233802338123382233832338423385233862338723388233892339023391233922339323394233952339623397233982339923400234012340223403234042340523406234072340823409234102341123412234132341423415234162341723418234192342023421234222342323424234252342623427234282342923430234312343223433234342343523436234372343823439234402344123442234432344423445234462344723448234492345023451234522345323454234552345623457234582345923460234612346223463234642346523466234672346823469234702347123472234732347423475234762347723478234792348023481234822348323484234852348623487234882348923490234912349223493234942349523496234972349823499235002350123502235032350423505235062350723508235092351023511235122351323514235152351623517235182351923520235212352223523235242352523526235272352823529235302353123532235332353423535235362353723538235392354023541235422354323544235452354623547235482354923550235512
355223553235542355523556235572355823559235602356123562235632356423565235662356723568235692357023571235722357323574235752357623577235782357923580235812358223583235842358523586235872358823589235902359123592235932359423595235962359723598235992360023601236022360323604236052360623607236082360923610236112361223613236142361523616236172361823619236202362123622236232362423625236262362723628236292363023631236322363323634236352363623637236382363923640236412364223643236442364523646236472364823649236502365123652236532365423655236562365723658236592366023661236622366323664236652366623667236682366923670236712367223673236742367523676236772367823679236802368123682236832368423685236862368723688236892369023691236922369323694236952369623697236982369923700237012370223703237042370523706237072370823709237102371123712237132371423715237162371723718237192372023721237222372323724237252372623727237282372923730237312373223733237342373523736237372373823739237402374123742237432374423745237462374723748237492375023751237522375323754237552375623757237582375923760237612376223763237642376523766237672376823769237702377123772237732377423775237762377723778237792378023781237822378323784237852378623787237882378923790237912379223793237942379523796237972379823799238002380123802238032380423805238062380723808238092381023811238122381323814238152381623817238182381923820238212382223823238242382523826238272382823829238302383123832238332383423835238362383723838238392384023841238422384323844238452384623847238482384923850238512385223853238542385523856238572385823859238602386123862238632386423865238662386723868238692387023871238722387323874238752387623877238782387923880238812388223883238842388523886238872388823889238902389123892238932389423895238962389723898238992390023901239022390323904239052390623907239082390923910239112391223913239142391523916239172391823919239202392123922239232392423925239262392723928239292393023931239322393323934239352393623937239382393923940239412394223943239442394523946239472394823949239502395123952239532395423955239562395723958239592396023961239622396323964239652396623967239682396923970239712397223973239742397523976239772397823979239802398123982239832398423985239862398723988239892399023991239922399323994239952399623997239982399924000240012400224003240042400524006240072400824009240102401124012240132401424015240162401724018240192402024021240222402324024240252402624027240282402924030240312403224033240342403524036240372403824039240402404124042240432404424045240462404724048240492405024051240522405324054240552405624057240582405924060240612406224063240642406524066240672406824069240702407124072240732407424075240762407724078240792408024081240822408324084240852408624087240882408924090240912409224093240942409524096240972409824099241002410124102241032410424105241062410724108241092411024111241122411324114241152411624117241182411924120241212412224123241242412524126241272412824129241302413124132241332413424135241362413724138241392414024141241422414324144241452414624147241482414924150241512415224153241542415524156241572415824159241602416124162241632416424165241662416724168241692417024171241722417324174241752417624177241782417924180241812418224183241842418524186241872418824189241902419124192241932419424195241962419724198241992420024201242022420324204242052420624207242082420924210242112421224213242142421524216242172421824219242202422124222242232422424225242262422724228242292423024231242322423324234242352423624237242382423924240242412424224243242442424524246242472424824249242502425124252242532425424255242562425724258242592426024261242622
706137062370633706437065370663706737068370693707037071370723707337074370753707637077370783707937080370813708237083370843708537086370873708837089370903709137092370933709437095370963709737098370993710037101371023710337104371053710637107371083710937110371113711237113371143711537116371173711837119371203712137122371233712437125371263712737128371293713037131371323713337134371353713637137371383713937140371413714237143371443714537146371473714837149371503715137152371533715437155371563715737158371593716037161371623716337164371653716637167371683716937170371713717237173371743717537176371773717837179371803718137182371833718437185371863718737188371893719037191371923719337194371953719637197371983719937200372013720237203372043720537206372073720837209372103721137212372133721437215372163721737218372193722037221372223722337224372253722637227372283722937230372313723237233372343723537236372373723837239372403724137242372433724437245372463724737248372493725037251372523725337254372553725637257372583725937260372613726237263372643726537266372673726837269372703727137272372733727437275372763727737278372793728037281372823728337284372853728637287372883728937290372913729237293372943729537296372973729837299373003730137302373033730437305373063730737308373093731037311373123731337314373153731637317373183731937320373213732237323373243732537326373273732837329373303733137332373333733437335373363733737338373393734037341373423734337344373453734637347373483734937350373513735237353373543735537356373573735837359373603736137362373633736437365373663736737368373693737037371373723737337374373753737637377373783737937380373813738237383373843738537386373873738837389373903739137392373933739437395373963739737398373993740037401374023740337404374053740637407374083740937410374113741237413374143741537416374173741837419374203742137422374233742437425374263742737428374293743037431374323743337434374353743637437374383743937440374413744237443374443744537446374473744837449374503745137452374533745437455374563745737458374593746037461374623746337464374653746637467374683746937470374713747237473374743747537476374773747837479374803748137482374833748437485374863748737488374893749037491374923749337494374953749637497374983749937500375013750237503375043750537506375073750837509375103751137512375133751437515375163751737518375193752037521375223752337524375253752637527375283752937530375313753237533375343753537536375373753837539375403754137542375433754437545375463754737548375493755037551375523755337554375553755637557375583755937560375613756237563375643756537566375673756837569375703757137572375733757437575375763757737578375793758037581375823758337584375853758637587375883758937590375913759237593375943759537596375973759837599376003760137602376033760437605376063760737608376093761037611376123761337614376153761637617376183761937620376213762237623376243762537626376273762837629376303763137632376333763437635376363763737638376393764037641376423764337644376453764637647376483764937650376513765237653376543765537656376573765837659376603766137662376633766437665376663766737668376693767037671376723767337674376753767637677376783767937680376813768237683376843768537686376873768837689376903769137692376933769437695376963769737698376993770037701377023770337704377053770637707377083770937710377113771237713377143771537716377173771837719377203772137722377233772437725377263772737728377293773037731377323773337734377353773637737377383773937740377413774237743377443774537746377473774837749377503775137752377533775437755377563775737758377593776037761377623776337764377653776637767377683776937770377713
777237773377743777537776377773777837779377803778137782377833778437785377863778737788377893779037791377923779337794377953779637797377983779937800378013780237803378043780537806378073780837809378103781137812378133781437815378163781737818378193782037821378223782337824378253782637827378283782937830378313783237833378343783537836378373783837839378403784137842378433784437845378463784737848378493785037851378523785337854378553785637857378583785937860378613786237863378643786537866378673786837869378703787137872378733787437875378763787737878378793788037881378823788337884378853788637887378883788937890378913789237893378943789537896378973789837899379003790137902379033790437905379063790737908379093791037911379123791337914379153791637917379183791937920379213792237923379243792537926379273792837929379303793137932379333793437935379363793737938379393794037941379423794337944379453794637947379483794937950379513795237953379543795537956379573795837959379603796137962379633796437965379663796737968379693797037971379723797337974379753797637977379783797937980379813798237983379843798537986379873798837989379903799137992379933799437995379963799737998379993800038001380023800338004380053800638007380083800938010380113801238013380143801538016380173801838019380203802138022380233802438025380263802738028380293803038031380323803338034380353803638037380383803938040380413804238043380443804538046380473804838049380503805138052380533805438055380563805738058380593806038061380623806338064380653806638067380683806938070380713807238073380743807538076380773807838079380803808138082380833808438085380863808738088380893809038091380923809338094380953809638097380983809938100381013810238103381043810538106381073810838109381103811138112381133811438115381163811738118381193812038121381223812338124381253812638127381283812938130381313813238133381343813538136381373813838139381403814138142381433814438145381463814738148381493815038151381523815338154381553815638157381583815938160381613816238163381643816538166381673816838169381703817138172381733817438175381763817738178381793818038181381823818338184381853818638187381883818938190381913819238193381943819538196381973819838199382003820138202382033820438205382063820738208382093821038211382123821338214382153821638217382183821938220382213822238223382243822538226382273822838229382303823138232382333823438235382363823738238382393824038241382423824338244382453824638247382483824938250382513825238253382543825538256382573825838259382603826138262382633826438265382663826738268382693827038271382723827338274382753827638277382783827938280382813828238283382843828538286382873828838289382903829138292382933829438295382963829738298382993830038301383023830338304383053830638307383083830938310383113831238313383143831538316383173831838319383203832138322383233832438325383263832738328383293833038331383323833338334383353833638337383383833938340383413834238343383443834538346383473834838349383503835138352383533835438355383563835738358383593836038361383623836338364383653836638367383683836938370383713837238373383743837538376383773837838379383803838138382383833838438385383863838738388383893839038391383923839338394383953839638397383983839938400384013840238403384043840538406384073840838409384103841138412384133841438415384163841738418384193842038421384223842338424384253842638427384283842938430384313843238433384343843538436384373843838439384403844138442384433844438445384463844738448384493845038451384523845338454384553845638457384583845938460384613846238463384643846538466384673846838469384703847138472384733847438475384763847738478384793848038481384823
848338484384853848638487384883848938490384913849238493384943849538496384973849838499385003850138502385033850438505385063850738508385093851038511385123851338514385153851638517385183851938520385213852238523385243852538526385273852838529385303853138532385333853438535385363853738538385393854038541385423854338544385453854638547385483854938550385513855238553385543855538556385573855838559385603856138562385633856438565385663856738568385693857038571385723857338574385753857638577385783857938580385813858238583385843858538586385873858838589385903859138592385933859438595385963859738598385993860038601386023860338604386053860638607386083860938610386113861238613386143861538616386173861838619386203862138622386233862438625386263862738628386293863038631386323863338634386353863638637386383863938640386413864238643386443864538646386473864838649386503865138652386533865438655386563865738658386593866038661386623866338664386653866638667386683866938670386713867238673386743867538676386773867838679386803868138682386833868438685386863868738688386893869038691386923869338694386953869638697386983869938700387013870238703387043870538706387073870838709387103871138712387133871438715387163871738718387193872038721387223872338724387253872638727387283872938730387313873238733387343873538736387373873838739387403874138742387433874438745387463874738748387493875038751387523875338754387553875638757387583875938760387613876238763387643876538766387673876838769387703877138772387733877438775387763877738778387793878038781387823878338784387853878638787387883878938790387913879238793387943879538796387973879838799388003880138802388033880438805388063880738808388093881038811388123881338814388153881638817388183881938820388213882238823388243882538826388273882838829388303883138832388333883438835388363883738838388393884038841388423884338844388453884638847388483884938850388513885238853388543885538856388573885838859388603886138862388633886438865388663886738868388693887038871388723887338874388753887638877388783887938880388813888238883388843888538886388873888838889388903889138892388933889438895388963889738898388993890038901389023890338904389053890638907389083890938910389113891238913389143891538916389173891838919389203892138922389233892438925389263892738928389293893038931389323893338934389353893638937389383893938940389413894238943389443894538946389473894838949389503895138952389533895438955389563895738958389593896038961389623896338964389653896638967389683896938970389713897238973389743897538976389773897838979389803898138982389833898438985389863898738988389893899038991389923899338994389953899638997389983899939000390013900239003390043900539006390073900839009390103901139012390133901439015390163901739018390193902039021390223902339024390253902639027390283902939030390313903239033390343903539036390373903839039390403904139042390433904439045390463904739048390493905039051390523905339054390553905639057390583905939060390613906239063390643906539066390673906839069390703907139072390733907439075390763907739078390793908039081390823908339084390853908639087390883908939090390913909239093390943909539096390973909839099391003910139102391033910439105391063910739108391093911039111391123911339114391153911639117391183911939120391213912239123391243912539126391273912839129391303913139132391333913439135391363913739138391393914039141391423914339144391453914639147391483914939150391513915239153391543915539156391573915839159391603916139162391633916439165391663916739168391693917039171391723917339174391753917639177391783917939180391813918239183391843918539186391873918839189391903919139192391933
919439195391963919739198391993920039201392023920339204392053920639207392083920939210392113921239213392143921539216392173921839219392203922139222392233922439225392263922739228392293923039231392323923339234392353923639237392383923939240392413924239243392443924539246392473924839249392503925139252392533925439255392563925739258392593926039261392623926339264392653926639267392683926939270392713927239273392743927539276392773927839279392803928139282392833928439285392863928739288392893929039291392923929339294392953929639297392983929939300393013930239303393043930539306393073930839309393103931139312393133931439315393163931739318393193932039321393223932339324393253932639327393283932939330393313933239333393343933539336393373933839339393403934139342393433934439345393463934739348393493935039351393523935339354393553935639357393583935939360393613936239363393643936539366393673936839369393703937139372393733937439375393763937739378393793938039381393823938339384393853938639387393883938939390393913939239393393943939539396393973939839399394003940139402394033940439405394063940739408394093941039411394123941339414394153941639417394183941939420394213942239423394243942539426394273942839429394303943139432394333943439435394363943739438394393944039441394423944339444394453944639447394483944939450394513945239453394543945539456394573945839459394603946139462394633946439465394663946739468394693947039471394723947339474394753947639477394783947939480394813948239483394843948539486394873948839489394903949139492394933949439495394963949739498394993950039501395023950339504395053950639507395083950939510395113951239513395143951539516395173951839519395203952139522395233952439525395263952739528395293953039531395323953339534395353953639537395383953939540395413954239543395443954539546395473954839549395503955139552395533955439555395563955739558395593956039561395623956339564395653956639567395683956939570395713957239573395743957539576395773957839579395803958139582395833958439585395863958739588395893959039591395923959339594395953959639597395983959939600396013960239603396043960539606396073960839609396103961139612396133961439615396163961739618396193962039621396223962339624396253962639627396283962939630396313963239633396343963539636396373963839639396403964139642396433964439645396463964739648396493965039651396523965339654396553965639657396583965939660396613966239663396643966539666396673966839669396703967139672396733967439675396763967739678396793968039681396823968339684396853968639687396883968939690396913969239693396943969539696396973969839699397003970139702397033970439705397063970739708397093971039711397123971339714397153971639717397183971939720397213972239723397243972539726397273972839729397303973139732397333973439735397363973739738397393974039741397423974339744397453974639747397483974939750397513975239753397543975539756397573975839759397603976139762397633976439765397663976739768397693977039771397723977339774397753977639777397783977939780397813978239783397843978539786397873978839789397903979139792397933979439795397963979739798397993980039801398023980339804398053980639807398083980939810398113981239813398143981539816398173981839819398203982139822398233982439825398263982739828398293983039831398323983339834398353983639837398383983939840398413984239843398443984539846398473984839849398503985139852398533985439855398563985739858398593986039861398623986339864398653986639867398683986939870398713987239873398743987539876398773987839879398803988139882398833988439885398863988739888398893989039891398923989339894398953989639897398983989939900399013990239903399043
990539906399073990839909399103991139912399133991439915399163991739918399193992039921399223992339924399253992639927399283992939930399313993239933399343993539936399373993839939399403994139942399433994439945399463994739948399493995039951399523995339954399553995639957399583995939960399613996239963399643996539966399673996839969399703997139972399733997439975399763997739978399793998039981399823998339984399853998639987399883998939990399913999239993399943999539996399973999839999400004000140002400034000440005400064000740008400094001040011400124001340014400154001640017400184001940020400214002240023400244002540026400274002840029400304003140032400334003440035400364003740038400394004040041400424004340044400454004640047400484004940050400514005240053400544005540056400574005840059400604006140062400634006440065400664006740068400694007040071400724007340074400754007640077400784007940080400814008240083400844008540086400874008840089400904009140092400934009440095400964009740098400994010040101401024010340104401054010640107401084010940110401114011240113401144011540116401174011840119401204012140122401234012440125401264012740128401294013040131401324013340134401354013640137401384013940140401414014240143401444014540146401474014840149401504015140152401534015440155401564015740158401594016040161401624016340164401654016640167401684016940170401714017240173401744017540176401774017840179401804018140182401834018440185401864018740188401894019040191401924019340194401954019640197401984019940200402014020240203402044020540206402074020840209402104021140212402134021440215402164021740218402194022040221402224022340224402254022640227402284022940230402314023240233402344023540236402374023840239402404024140242402434024440245402464024740248402494025040251402524025340254402554025640257402584025940260402614026240263402644026540266402674026840269402704027140272402734027440275402764027740278402794028040281402824028340284402854028640287402884028940290402914029240293402944029540296402974029840299403004030140302403034030440305403064030740308403094031040311403124031340314403154031640317403184031940320403214032240323403244032540326403274032840329403304033140332403334033440335403364033740338403394034040341403424034340344403454034640347403484034940350403514035240353403544035540356403574035840359403604036140362403634036440365403664036740368403694037040371403724037340374403754037640377403784037940380403814038240383403844038540386403874038840389403904039140392403934039440395403964039740398403994040040401404024040340404404054040640407404084040940410404114041240413404144041540416404174041840419404204042140422404234042440425404264042740428404294043040431404324043340434404354043640437404384043940440404414044240443404444044540446404474044840449404504045140452404534045440455404564045740458404594046040461404624046340464404654046640467404684046940470404714047240473404744047540476404774047840479404804048140482404834048440485404864048740488404894049040491404924049340494404954049640497404984049940500405014050240503405044050540506405074050840509405104051140512405134051440515405164051740518405194052040521405224052340524405254052640527405284052940530405314053240533405344053540536405374053840539405404054140542405434054440545405464054740548405494055040551405524055340554405554055640557405584055940560405614056240563405644056540566405674056840569405704057140572405734057440575405764057740578405794058040581405824058340584405854058640587405884058940590405914059240593405944059540596405974059840599406004060140602406034060440605406064060740608406094061040611406124061340614406154
061640617406184061940620406214062240623406244062540626406274062840629406304063140632406334063440635406364063740638406394064040641406424064340644406454064640647406484064940650406514065240653406544065540656406574065840659406604066140662406634066440665406664066740668406694067040671406724067340674406754067640677406784067940680406814068240683406844068540686406874068840689406904069140692406934069440695406964069740698406994070040701407024070340704407054070640707407084070940710407114071240713407144071540716407174071840719407204072140722407234072440725407264072740728407294073040731407324073340734407354073640737407384073940740407414074240743407444074540746407474074840749407504075140752407534075440755407564075740758407594076040761407624076340764407654076640767407684076940770407714077240773407744077540776407774077840779407804078140782407834078440785407864078740788407894079040791407924079340794407954079640797407984079940800408014080240803408044080540806408074080840809408104081140812408134081440815408164081740818408194082040821408224082340824408254082640827408284082940830408314083240833408344083540836408374083840839408404084140842408434084440845408464084740848408494085040851408524085340854408554085640857408584085940860408614086240863408644086540866408674086840869408704087140872408734087440875408764087740878408794088040881408824088340884408854088640887408884088940890408914089240893408944089540896408974089840899409004090140902409034090440905409064090740908409094091040911409124091340914409154091640917409184091940920409214092240923409244092540926409274092840929409304093140932409334093440935409364093740938409394094040941409424094340944409454094640947409484094940950409514095240953409544095540956409574095840959409604096140962409634096440965409664096740968409694097040971409724097340974409754097640977409784097940980409814098240983409844098540986409874098840989409904099140992409934099440995409964099740998409994100041001410024100341004410054100641007410084100941010410114101241013410144101541016410174101841019410204102141022410234102441025410264102741028410294103041031410324103341034410354103641037410384103941040410414104241043410444104541046410474104841049410504105141052410534105441055410564105741058410594106041061410624106341064410654106641067410684106941070410714107241073410744107541076410774107841079410804108141082410834108441085410864108741088410894109041091410924109341094410954109641097410984109941100411014110241103411044110541106411074110841109411104111141112411134111441115411164111741118411194112041121411224112341124411254112641127411284112941130411314113241133411344113541136411374113841139411404114141142411434114441145411464114741148411494115041151411524115341154411554115641157411584115941160411614116241163411644116541166411674116841169411704117141172411734117441175411764117741178411794118041181411824118341184411854118641187411884118941190411914119241193411944119541196411974119841199412004120141202412034120441205412064120741208412094121041211412124121341214412154121641217412184121941220412214122241223412244122541226412274122841229412304123141232412334123441235412364123741238412394124041241412424124341244412454124641247412484124941250412514125241253412544125541256412574125841259412604126141262412634126441265412664126741268412694127041271412724127341274412754127641277412784127941280412814128241283412844128541286412874128841289412904129141292412934129441295412964129741298412994130041301413024130341304413054130641307413084130941310413114131241313413144131541316413174131841319413204132141322413234132441325413264
132741328413294133041331413324133341334413354133641337413384133941340413414134241343413444134541346413474134841349413504135141352413534135441355413564135741358413594136041361413624136341364413654136641367413684136941370413714137241373413744137541376413774137841379413804138141382413834138441385413864138741388413894139041391413924139341394413954139641397413984139941400414014140241403414044140541406414074140841409414104141141412414134141441415414164141741418414194142041421414224142341424414254142641427414284142941430414314143241433414344143541436414374143841439414404144141442414434144441445414464144741448414494145041451414524145341454414554145641457414584145941460414614146241463414644146541466414674146841469414704147141472414734147441475414764147741478414794148041481414824148341484414854148641487414884148941490414914149241493414944149541496414974149841499415004150141502415034150441505415064150741508415094151041511415124151341514415154151641517415184151941520415214152241523415244152541526415274152841529415304153141532415334153441535415364153741538415394154041541415424154341544415454154641547415484154941550415514155241553415544155541556415574155841559415604156141562415634156441565415664156741568415694157041571415724157341574415754157641577415784157941580415814158241583415844158541586415874158841589415904159141592415934159441595415964159741598415994160041601416024160341604416054160641607416084160941610416114161241613416144161541616416174161841619416204162141622416234162441625416264162741628416294163041631416324163341634416354163641637416384163941640416414164241643416444164541646416474164841649416504165141652416534165441655416564165741658416594166041661416624166341664416654166641667416684166941670416714167241673416744167541676416774167841679416804168141682416834168441685416864168741688416894169041691416924169341694416954169641697416984169941700417014170241703417044170541706417074170841709417104171141712417134171441715417164171741718417194172041721417224172341724417254172641727417284172941730417314173241733417344173541736417374173841739417404174141742417434174441745417464174741748417494175041751417524175341754417554175641757417584175941760417614176241763417644176541766417674176841769417704177141772417734177441775417764177741778417794178041781417824178341784417854178641787417884178941790417914179241793417944179541796417974179841799418004180141802418034180441805418064180741808418094181041811418124181341814418154181641817418184181941820418214182241823418244182541826418274182841829418304183141832418334183441835418364183741838418394184041841418424184341844418454184641847418484184941850418514185241853418544185541856418574185841859418604186141862418634186441865418664186741868418694187041871418724187341874418754187641877418784187941880418814188241883418844188541886418874188841889418904189141892418934189441895418964189741898418994190041901419024190341904419054190641907419084190941910419114191241913419144191541916419174191841919419204192141922419234192441925419264192741928419294193041931419324193341934419354193641937419384193941940419414194241943419444194541946419474194841949419504195141952419534195441955419564195741958419594196041961419624196341964419654196641967419684196941970419714197241973419744197541976419774197841979419804198141982419834198441985419864198741988419894199041991419924199341994419954199641997419984199942000420014200242003420044200542006420074200842009420104201142012420134201442015420164201742018420194202042021420224202342024420254202642027420284202942030420314203242033420344203542036420374
203842039420404204142042420434204442045420464204742048420494205042051420524205342054420554205642057420584205942060420614206242063420644206542066420674206842069420704207142072420734207442075420764207742078420794208042081420824208342084420854208642087420884208942090420914209242093420944209542096420974209842099421004210142102421034210442105421064210742108421094211042111421124211342114421154211642117421184211942120421214212242123421244212542126421274212842129421304213142132421334213442135421364213742138421394214042141421424214342144421454214642147421484214942150421514215242153421544215542156421574215842159421604216142162421634216442165421664216742168421694217042171421724217342174421754217642177421784217942180421814218242183421844218542186421874218842189421904219142192421934219442195421964219742198421994220042201422024220342204422054220642207422084220942210422114221242213422144221542216422174221842219422204222142222422234222442225422264222742228422294223042231422324223342234422354223642237422384223942240422414224242243422444224542246422474224842249422504225142252422534225442255422564225742258422594226042261422624226342264422654226642267422684226942270422714227242273422744227542276422774227842279422804228142282422834228442285422864228742288422894229042291422924229342294422954229642297422984229942300423014230242303423044230542306423074230842309423104231142312423134231442315423164231742318423194232042321423224232342324423254232642327423284232942330423314233242333423344233542336423374233842339423404234142342423434234442345423464234742348423494235042351423524235342354423554235642357423584235942360423614236242363423644236542366423674236842369423704237142372423734237442375423764237742378423794238042381423824238342384423854238642387423884238942390423914239242393423944239542396423974239842399424004240142402424034240442405424064240742408424094241042411424124241342414424154241642417424184241942420424214242242423424244242542426424274242842429424304243142432424334243442435424364243742438424394244042441424424244342444424454244642447424484244942450424514245242453424544245542456424574245842459424604246142462424634246442465424664246742468424694247042471424724247342474424754247642477424784247942480424814248242483424844248542486424874248842489424904249142492424934249442495424964249742498424994250042501425024250342504425054250642507425084250942510425114251242513425144251542516425174251842519425204252142522425234252442525425264252742528425294253042531425324253342534425354253642537425384253942540425414254242543425444254542546425474254842549425504255142552425534255442555425564255742558425594256042561425624256342564425654256642567425684256942570425714257242573425744257542576425774257842579425804258142582425834258442585425864258742588425894259042591425924259342594425954259642597425984259942600426014260242603426044260542606426074260842609426104261142612426134261442615426164261742618426194262042621426224262342624426254262642627426284262942630426314263242633426344263542636426374263842639426404264142642426434264442645426464264742648426494265042651426524265342654426554265642657426584265942660426614266242663426644266542666426674266842669426704267142672426734267442675426764267742678426794268042681426824268342684426854268642687426884268942690426914269242693426944269542696426974269842699427004270142702427034270442705427064270742708427094271042711427124271342714427154271642717427184271942720427214272242723427244272542726427274272842729427304273142732427334273442735427364273742738427394274042741427424274342744427454274642747427484
274942750427514275242753427544275542756427574275842759427604276142762427634276442765427664276742768427694277042771427724277342774427754277642777427784277942780427814278242783427844278542786427874278842789427904279142792427934279442795427964279742798427994280042801428024280342804428054280642807428084280942810428114281242813428144281542816428174281842819428204282142822428234282442825428264282742828428294283042831428324283342834428354283642837428384283942840428414284242843428444284542846428474284842849428504285142852428534285442855428564285742858428594286042861428624286342864428654286642867428684286942870428714287242873428744287542876428774287842879428804288142882428834288442885428864288742888428894289042891428924289342894428954289642897428984289942900429014290242903429044290542906429074290842909429104291142912429134291442915429164291742918429194292042921429224292342924429254292642927429284292942930429314293242933429344293542936429374293842939429404294142942429434294442945429464294742948429494295042951429524295342954429554295642957429584295942960429614296242963429644296542966429674296842969429704297142972429734297442975429764297742978429794298042981429824298342984429854298642987429884298942990429914299242993429944299542996429974299842999430004300143002430034300443005430064300743008430094301043011430124301343014430154301643017430184301943020430214302243023430244302543026430274302843029430304303143032430334303443035430364303743038430394304043041430424304343044430454304643047430484304943050430514305243053430544305543056430574305843059430604306143062430634306443065430664306743068430694307043071430724307343074430754307643077430784307943080430814308243083430844308543086430874308843089430904309143092430934309443095430964309743098430994310043101431024310343104431054310643107431084310943110431114311243113431144311543116431174311843119431204312143122431234312443125431264312743128431294313043131431324313343134431354313643137431384313943140431414314243143431444314543146431474314843149431504315143152431534315443155431564315743158431594316043161431624316343164431654316643167431684316943170431714317243173431744317543176431774317843179431804318143182431834318443185431864318743188431894319043191431924319343194431954319643197431984319943200432014320243203432044320543206432074320843209432104321143212432134321443215432164321743218432194322043221432224322343224432254322643227432284322943230432314323243233432344323543236432374323843239432404324143242432434324443245432464324743248432494325043251432524325343254432554325643257432584325943260432614326243263432644326543266432674326843269432704327143272432734327443275432764327743278432794328043281432824328343284432854328643287432884328943290432914329243293432944329543296432974329843299433004330143302433034330443305433064330743308433094331043311433124331343314433154331643317433184331943320433214332243323433244332543326433274332843329433304333143332433334333443335433364333743338433394334043341433424334343344433454334643347433484334943350433514335243353433544335543356433574335843359433604336143362433634336443365433664336743368433694337043371433724337343374433754337643377433784337943380433814338243383433844338543386433874338843389433904339143392433934339443395433964339743398433994340043401434024340343404434054340643407434084340943410434114341243413434144341543416434174341843419434204342143422434234342443425434264342743428434294343043431434324343343434434354343643437434384343943440434414344243443434444344543446434474344843449434504345143452434534345443455434564345743458434594
346043461434624346343464434654346643467434684346943470434714347243473434744347543476434774347843479434804348143482434834348443485434864348743488434894349043491434924349343494434954349643497434984349943500435014350243503435044350543506435074350843509435104351143512435134351443515435164351743518435194352043521435224352343524435254352643527435284352943530435314353243533435344353543536435374353843539435404354143542435434354443545435464354743548435494355043551435524355343554435554355643557435584355943560435614356243563435644356543566435674356843569435704357143572435734357443575435764357743578435794358043581435824358343584435854358643587435884358943590435914359243593435944359543596435974359843599436004360143602436034360443605436064360743608436094361043611436124361343614436154361643617436184361943620436214362243623436244362543626436274362843629436304363143632436334363443635436364363743638436394364043641436424364343644436454364643647436484364943650436514365243653436544365543656436574365843659436604366143662436634366443665436664366743668436694367043671436724367343674436754367643677436784367943680436814368243683436844368543686436874368843689436904369143692436934369443695436964369743698436994370043701437024370343704437054370643707437084370943710437114371243713437144371543716437174371843719437204372143722437234372443725437264372743728437294373043731437324373343734437354373643737437384373943740437414374243743437444374543746437474374843749437504375143752437534375443755437564375743758437594376043761437624376343764437654376643767437684376943770437714377243773437744377543776437774377843779437804378143782437834378443785437864378743788437894379043791437924379343794437954379643797437984379943800438014380243803438044380543806438074380843809438104381143812438134381443815438164381743818438194382043821438224382343824438254382643827438284382943830438314383243833438344383543836438374383843839438404384143842438434384443845438464384743848438494385043851438524385343854438554385643857438584385943860438614386243863438644386543866438674386843869438704387143872438734387443875438764387743878438794388043881438824388343884438854388643887438884388943890438914389243893438944389543896438974389843899439004390143902439034390443905439064390743908439094391043911439124391343914439154391643917439184391943920439214392243923439244392543926439274392843929439304393143932439334393443935439364393743938439394394043941439424394343944439454394643947439484394943950439514395243953439544395543956439574395843959439604396143962439634396443965439664396743968439694397043971439724397343974439754397643977439784397943980439814398243983439844398543986439874398843989439904399143992439934399443995439964399743998439994400044001440024400344004440054400644007440084400944010440114401244013440144401544016440174401844019440204402144022440234402444025440264402744028440294403044031440324403344034440354403644037440384403944040440414404244043440444404544046440474404844049440504405144052440534405444055440564405744058440594406044061440624406344064440654406644067440684406944070440714407244073440744407544076440774407844079440804408144082440834408444085440864408744088440894409044091440924409344094440954409644097440984409944100441014410244103441044410544106441074410844109441104411144112441134411444115441164411744118441194412044121441224412344124441254412644127441284412944130441314413244133441344413544136441374413844139441404414144142441434414444145441464414744148441494415044151441524415344154441554415644157441584415944160441614416244163441644416544166441674416844169441704
417144172441734417444175441764417744178441794418044181441824418344184441854418644187441884418944190441914419244193441944419544196441974419844199442004420144202442034420444205442064420744208442094421044211442124421344214442154421644217442184421944220442214422244223442244422544226442274422844229442304423144232442334423444235442364423744238442394424044241442424424344244442454424644247442484424944250442514425244253442544425544256442574425844259442604426144262442634426444265442664426744268442694427044271442724427344274442754427644277442784427944280442814428244283442844428544286442874428844289442904429144292442934429444295442964429744298442994430044301443024430344304443054430644307443084430944310443114431244313443144431544316443174431844319443204432144322443234432444325443264432744328443294433044331443324433344334443354433644337443384433944340443414434244343443444434544346443474434844349443504435144352443534435444355443564435744358443594436044361443624436344364443654436644367443684436944370443714437244373443744437544376443774437844379443804438144382443834438444385443864438744388443894439044391443924439344394443954439644397443984439944400444014440244403444044440544406444074440844409444104441144412444134441444415444164441744418444194442044421444224442344424444254442644427444284442944430444314443244433444344443544436444374443844439444404444144442444434444444445444464444744448444494445044451444524445344454444554445644457444584445944460444614446244463444644446544466444674446844469444704447144472444734447444475444764447744478444794448044481444824448344484444854448644487444884448944490444914449244493444944449544496444974449844499445004450144502445034450444505445064450744508445094451044511445124451344514445154451644517445184451944520445214452244523445244452544526445274452844529445304453144532445334453444535445364453744538445394454044541445424454344544445454454644547445484454944550445514455244553445544455544556445574455844559445604456144562445634456444565445664456744568445694457044571445724457344574445754457644577445784457944580445814458244583445844458544586445874458844589445904459144592445934459444595445964459744598445994460044601446024460344604446054460644607446084460944610446114461244613446144461544616446174461844619446204462144622446234462444625446264462744628446294463044631446324463344634446354463644637446384463944640446414464244643446444464544646446474464844649446504465144652446534465444655446564465744658446594466044661446624466344664446654466644667446684466944670446714467244673446744467544676446774467844679446804468144682446834468444685446864468744688446894469044691446924469344694446954469644697446984469944700447014470244703447044470544706447074470844709447104471144712447134471444715447164471744718447194472044721447224472344724447254472644727447284472944730447314473244733447344473544736447374473844739447404474144742447434474444745447464474744748447494475044751447524475344754447554475644757447584475944760447614476244763447644476544766447674476844769447704477144772447734477444775447764477744778447794478044781447824478344784447854478644787447884478944790447914479244793447944479544796447974479844799448004480144802448034480444805448064480744808448094481044811448124481344814448154481644817448184481944820448214482244823448244482544826448274482844829448304483144832448334483444835448364483744838448394484044841448424484344844448454484644847448484484944850448514485244853448544485544856448574485844859448604486144862448634486444865448664486744868448694487044871448724487344874448754487644877448784487944880448814
488244883448844488544886448874488844889448904489144892448934489444895448964489744898448994490044901449024490344904449054490644907449084490944910449114491244913449144491544916449174491844919449204492144922449234492444925449264492744928449294493044931449324493344934449354493644937449384493944940449414494244943449444494544946449474494844949449504495144952449534495444955449564495744958449594496044961449624496344964449654496644967449684496944970449714497244973449744497544976449774497844979449804498144982449834498444985449864498744988449894499044991449924499344994449954499644997449984499945000450014500245003450044500545006450074500845009450104501145012450134501445015450164501745018450194502045021450224502345024450254502645027450284502945030450314503245033450344503545036450374503845039450404504145042450434504445045450464504745048450494505045051450524505345054450554505645057450584505945060450614506245063450644506545066450674506845069450704507145072450734507445075450764507745078450794508045081450824508345084450854508645087450884508945090450914509245093450944509545096450974509845099451004510145102451034510445105451064510745108451094511045111451124511345114451154511645117451184511945120451214512245123451244512545126451274512845129451304513145132451334513445135451364513745138451394514045141451424514345144451454514645147451484514945150451514515245153451544515545156451574515845159451604516145162451634516445165451664516745168451694517045171451724517345174451754517645177451784517945180451814518245183451844518545186451874518845189451904519145192451934519445195451964519745198451994520045201452024520345204452054520645207452084520945210452114521245213452144521545216452174521845219452204522145222452234522445225452264522745228452294523045231452324523345234452354523645237452384523945240452414524245243452444524545246452474524845249452504525145252452534525445255452564525745258452594526045261452624526345264452654526645267452684526945270452714527245273452744527545276452774527845279452804528145282452834528445285452864528745288452894529045291452924529345294452954529645297452984529945300453014530245303453044530545306453074530845309453104531145312453134531445315453164531745318453194532045321453224532345324453254532645327453284532945330453314533245333453344533545336453374533845339453404534145342453434534445345453464534745348453494535045351453524535345354453554535645357453584535945360453614536245363453644536545366453674536845369453704537145372453734537445375453764537745378453794538045381453824538345384453854538645387453884538945390453914539245393453944539545396453974539845399454004540145402454034540445405454064540745408454094541045411454124541345414454154541645417454184541945420454214542245423454244542545426454274542845429454304543145432454334543445435454364543745438454394544045441454424544345444454454544645447454484544945450454514545245453454544545545456454574545845459454604546145462454634546445465454664546745468454694547045471454724547345474454754547645477454784547945480454814548245483454844548545486454874548845489454904549145492454934549445495454964549745498454994550045501455024550345504455054550645507455084550945510455114551245513455144551545516455174551845519455204552145522455234552445525455264552745528455294553045531455324553345534455354553645537455384553945540455414554245543455444554545546455474554845549455504555145552455534555445555455564555745558455594556045561455624556345564455654556645567455684556945570455714557245573455744557545576455774557845579455804558145582455834558445585455864558745588455894559045591455924
559345594455954559645597455984559945600456014560245603456044560545606456074560845609456104561145612456134561445615456164561745618456194562045621456224562345624456254562645627456284562945630456314563245633456344563545636456374563845639456404564145642456434564445645456464564745648456494565045651456524565345654456554565645657456584565945660456614566245663456644566545666456674566845669456704567145672456734567445675456764567745678456794568045681456824568345684456854568645687456884568945690456914569245693456944569545696456974569845699457004570145702457034570445705457064570745708457094571045711457124571345714457154571645717457184571945720457214572245723457244572545726457274572845729457304573145732457334573445735457364573745738457394574045741457424574345744457454574645747457484574945750457514575245753457544575545756457574575845759457604576145762457634576445765457664576745768457694577045771457724577345774457754577645777457784577945780457814578245783457844578545786457874578845789457904579145792457934579445795457964579745798457994580045801458024580345804458054580645807458084580945810458114581245813458144581545816458174581845819458204582145822458234582445825458264582745828458294583045831458324583345834458354583645837458384583945840458414584245843458444584545846458474584845849458504585145852458534585445855458564585745858458594586045861458624586345864458654586645867458684586945870458714587245873458744587545876458774587845879458804588145882458834588445885458864588745888458894589045891458924589345894458954589645897458984589945900459014590245903459044590545906459074590845909459104591145912459134591445915459164591745918459194592045921459224592345924459254592645927459284592945930459314593245933459344593545936459374593845939459404594145942459434594445945459464594745948459494595045951459524595345954459554595645957459584595945960459614596245963459644596545966459674596845969459704597145972459734597445975459764597745978459794598045981459824598345984459854598645987459884598945990459914599245993459944599545996459974599845999460004600146002460034600446005460064600746008460094601046011460124601346014460154601646017460184601946020460214602246023460244602546026460274602846029460304603146032460334603446035460364603746038460394604046041460424604346044460454604646047460484604946050460514605246053460544605546056460574605846059460604606146062460634606446065460664606746068460694607046071460724607346074460754607646077460784607946080460814608246083460844608546086460874608846089460904609146092460934609446095460964609746098460994610046101461024610346104461054610646107461084610946110461114611246113461144611546116461174611846119461204612146122461234612446125461264612746128461294613046131461324613346134461354613646137461384613946140461414614246143461444614546146461474614846149461504615146152461534615446155461564615746158461594616046161461624616346164461654616646167461684616946170461714617246173461744617546176461774617846179461804618146182461834618446185461864618746188461894619046191461924619346194461954619646197461984619946200462014620246203462044620546206462074620846209462104621146212462134621446215462164621746218462194622046221462224622346224462254622646227462284622946230462314623246233462344623546236462374623846239462404624146242462434624446245462464624746248462494625046251462524625346254462554625646257462584625946260462614626246263462644626546266462674626846269462704627146272462734627446275462764627746278462794628046281462824628346284462854628646287462884628946290462914629246293462944629546296462974629846299463004630146302463034
630446305463064630746308463094631046311463124631346314463154631646317463184631946320463214632246323463244632546326463274632846329463304633146332463334633446335463364633746338463394634046341463424634346344463454634646347463484634946350463514635246353463544635546356463574635846359463604636146362463634636446365463664636746368463694637046371463724637346374463754637646377463784637946380463814638246383463844638546386463874638846389463904639146392463934639446395463964639746398463994640046401464024640346404464054640646407464084640946410464114641246413464144641546416464174641846419464204642146422464234642446425464264642746428464294643046431464324643346434464354643646437464384643946440464414644246443464444644546446464474644846449464504645146452464534645446455464564645746458464594646046461464624646346464464654646646467464684646946470464714647246473464744647546476464774647846479464804648146482464834648446485464864648746488464894649046491464924649346494464954649646497464984649946500465014650246503465044650546506465074650846509465104651146512465134651446515465164651746518465194652046521465224652346524465254652646527465284652946530465314653246533465344653546536465374653846539465404654146542465434654446545465464654746548465494655046551465524655346554465554655646557465584655946560465614656246563465644656546566465674656846569465704657146572465734657446575465764657746578465794658046581465824658346584465854658646587465884658946590465914659246593465944659546596465974659846599466004660146602466034660446605466064660746608466094661046611466124661346614466154661646617466184661946620466214662246623466244662546626466274662846629466304663146632466334663446635466364663746638466394664046641466424664346644466454664646647466484664946650466514665246653466544665546656466574665846659466604666146662466634666446665466664666746668466694667046671466724667346674466754667646677466784667946680466814668246683466844668546686466874668846689466904669146692466934669446695466964669746698466994670046701467024670346704467054670646707467084670946710467114671246713467144671546716467174671846719467204672146722467234672446725467264672746728467294673046731467324673346734467354673646737467384673946740467414674246743467444674546746467474674846749467504675146752467534675446755467564675746758467594676046761467624676346764467654676646767467684676946770467714677246773467744677546776467774677846779467804678146782467834678446785467864678746788467894679046791467924679346794467954679646797467984679946800468014680246803468044680546806468074680846809468104681146812468134681446815468164681746818468194682046821468224682346824468254682646827468284682946830468314683246833468344683546836468374683846839468404684146842468434684446845468464684746848468494685046851468524685346854468554685646857468584685946860468614686246863468644686546866468674686846869468704687146872468734687446875468764687746878468794688046881468824688346884468854688646887468884688946890468914689246893468944689546896468974689846899469004690146902469034690446905469064690746908469094691046911469124691346914469154691646917469184691946920469214692246923469244692546926469274692846929469304693146932469334693446935469364693746938469394694046941469424694346944469454694646947469484694946950469514695246953469544695546956469574695846959469604696146962469634696446965469664696746968469694697046971469724697346974469754697646977469784697946980469814698246983469844698546986469874698846989469904699146992469934699446995469964699746998469994700047001470024700347004470054700647007470084700947010470114701247013470144
981359814598155981659817598185981959820598215982259823598245982559826598275982859829598305983159832598335983459835598365983759838598395984059841598425984359844598455984659847598485984959850598515985259853598545985559856598575985859859598605986159862598635986459865598665986759868598695987059871598725987359874598755987659877598785987959880598815988259883598845988559886598875988859889598905989159892598935989459895598965989759898598995990059901599025990359904599055990659907599085990959910599115991259913599145991559916599175991859919599205992159922599235992459925599265992759928599295993059931599325993359934599355993659937599385993959940599415994259943599445994559946599475994859949599505995159952599535995459955599565995759958599595996059961599625996359964599655996659967599685996959970599715997259973599745997559976599775997859979599805998159982599835998459985599865998759988599895999059991599925999359994599955999659997599985999960000600016000260003600046000560006600076000860009600106001160012600136001460015600166001760018600196002060021600226002360024600256002660027600286002960030600316003260033600346003560036600376003860039600406004160042600436004460045600466004760048600496005060051600526005360054600556005660057600586005960060600616006260063600646006560066600676006860069600706007160072600736007460075600766007760078600796008060081600826008360084600856008660087600886008960090600916009260093600946009560096600976009860099601006010160102601036010460105601066010760108601096011060111601126011360114601156011660117601186011960120601216012260123601246012560126601276012860129601306013160132601336013460135601366013760138601396014060141601426014360144601456014660147601486014960150601516015260153601546015560156601576015860159601606016160162601636016460165601666016760168601696017060171601726017360174601756017660177601786017960180601816018260183601846018560186601876018860189601906019160192601936019460195601966019760198601996020060201602026020360204602056020660207602086020960210602116021260213602146021560216602176021860219602206022160222602236022460225602266022760228602296023060231602326023360234602356023660237602386023960240602416024260243602446024560246602476024860249602506025160252602536025460255602566025760258602596026060261602626026360264602656026660267602686026960270602716027260273602746027560276602776027860279602806028160282602836028460285602866028760288602896029060291602926029360294602956029660297602986029960300603016030260303603046030560306603076030860309603106031160312603136031460315603166031760318603196032060321603226032360324603256032660327603286032960330603316033260333603346033560336603376033860339603406034160342603436034460345603466034760348603496035060351603526035360354603556035660357603586035960360603616036260363603646036560366603676036860369603706037160372603736037460375603766037760378603796038060381603826038360384603856038660387603886038960390603916039260393603946039560396603976039860399604006040160402604036040460405604066040760408604096041060411604126041360414604156041660417604186041960420604216042260423604246042560426604276042860429604306043160432604336043460435604366043760438604396044060441604426044360444604456044660447604486044960450604516045260453604546045560456604576045860459604606046160462604636046460465604666046760468604696047060471604726047360474604756047660477604786047960480604816048260483604846048560486604876048860489604906049160492604936049460495604966049760498604996050060501605026050360504605056050660507605086050960510605116051260513605146051560516605176051860519605206052160522605236
052460525605266052760528605296053060531605326053360534605356053660537605386053960540605416054260543605446054560546605476054860549605506055160552605536055460555605566055760558605596056060561605626056360564605656056660567605686056960570605716057260573605746057560576605776057860579605806058160582605836058460585605866058760588605896059060591605926059360594605956059660597605986059960600606016060260603606046060560606606076060860609606106061160612606136061460615606166061760618606196062060621606226062360624606256062660627606286062960630606316063260633606346063560636606376063860639606406064160642606436064460645606466064760648606496065060651606526065360654606556065660657606586065960660606616066260663606646066560666606676066860669606706067160672606736067460675606766067760678606796068060681606826068360684606856068660687606886068960690606916069260693606946069560696606976069860699607006070160702607036070460705607066070760708607096071060711607126071360714607156071660717607186071960720607216072260723607246072560726607276072860729607306073160732607336073460735607366073760738607396074060741607426074360744607456074660747607486074960750607516075260753607546075560756607576075860759607606076160762607636076460765607666076760768607696077060771607726077360774607756077660777607786077960780607816078260783607846078560786607876078860789607906079160792607936079460795607966079760798607996080060801608026080360804608056080660807608086080960810608116081260813608146081560816608176081860819608206082160822608236082460825608266082760828608296083060831608326083360834608356083660837608386083960840608416084260843608446084560846608476084860849608506085160852608536085460855608566085760858608596086060861608626086360864608656086660867608686086960870608716087260873608746087560876608776087860879608806088160882608836088460885608866088760888608896089060891608926089360894608956089660897608986089960900609016090260903609046090560906609076090860909609106091160912609136091460915609166091760918609196092060921609226092360924609256092660927609286092960930609316093260933609346093560936609376093860939609406094160942609436094460945609466094760948609496095060951609526095360954609556095660957609586095960960609616096260963609646096560966609676096860969609706097160972609736097460975609766097760978609796098060981609826098360984609856098660987609886098960990609916099260993609946099560996609976099860999610006100161002610036100461005610066100761008610096101061011610126101361014610156101661017610186101961020610216102261023610246102561026610276102861029610306103161032610336103461035610366103761038610396104061041610426104361044610456104661047610486104961050610516105261053610546105561056610576105861059610606106161062610636106461065610666106761068610696107061071610726107361074610756107661077610786107961080610816108261083610846108561086610876108861089610906109161092610936109461095610966109761098610996110061101611026110361104611056110661107611086110961110611116111261113611146111561116611176111861119611206112161122611236112461125611266112761128611296113061131611326113361134611356113661137611386113961140611416114261143611446114561146611476114861149611506115161152611536115461155611566115761158611596116061161611626116361164611656116661167611686116961170611716117261173611746117561176611776117861179611806118161182611836118461185611866118761188611896119061191611926119361194611956119661197611986119961200612016120261203612046120561206612076120861209612106121161212612136121461215612166121761218612196122061221612226122361224612256122661227612286122961230612316123261233612346
123561236612376123861239612406124161242612436124461245612466124761248612496125061251612526125361254612556125661257612586125961260612616126261263612646126561266612676126861269612706127161272612736127461275612766127761278612796128061281612826128361284612856128661287612886128961290612916129261293612946129561296612976129861299613006130161302613036130461305613066130761308613096131061311613126131361314613156131661317613186131961320613216132261323613246132561326613276132861329613306133161332613336133461335613366133761338613396134061341613426134361344613456134661347613486134961350613516135261353613546135561356613576135861359613606136161362613636136461365613666136761368613696137061371613726137361374613756137661377613786137961380613816138261383613846138561386613876138861389613906139161392613936139461395613966139761398613996140061401614026140361404614056140661407614086140961410614116141261413614146141561416614176141861419614206142161422614236142461425614266142761428614296143061431614326143361434614356143661437614386143961440614416144261443614446144561446614476144861449614506145161452614536145461455614566145761458614596146061461614626146361464614656146661467614686146961470614716147261473614746147561476614776147861479614806148161482614836148461485614866148761488614896149061491614926149361494614956149661497614986149961500615016150261503615046150561506615076150861509615106151161512615136151461515615166151761518615196152061521615226152361524615256152661527615286152961530615316153261533615346153561536615376153861539615406154161542615436154461545615466154761548615496155061551615526155361554615556155661557615586155961560615616156261563615646156561566615676156861569615706157161572615736157461575615766157761578615796158061581615826158361584615856158661587615886158961590615916159261593615946159561596615976159861599616006160161602616036160461605616066160761608616096161061611616126161361614616156161661617616186161961620616216162261623616246162561626616276162861629616306163161632616336163461635616366163761638616396164061641616426164361644616456164661647616486164961650616516165261653616546165561656616576165861659616606166161662616636166461665616666166761668616696167061671616726167361674616756167661677616786167961680616816168261683616846168561686616876168861689616906169161692616936169461695616966169761698616996170061701617026170361704617056170661707617086170961710617116171261713617146171561716617176171861719617206172161722617236172461725617266172761728617296173061731617326173361734617356173661737617386173961740617416174261743617446174561746617476174861749617506175161752617536175461755617566175761758617596176061761617626176361764617656176661767617686176961770617716177261773617746177561776617776177861779617806178161782617836178461785617866178761788617896179061791617926179361794617956179661797617986179961800618016180261803618046180561806618076180861809618106181161812618136181461815618166181761818618196182061821618226182361824618256182661827618286182961830618316183261833618346183561836618376183861839618406184161842618436184461845618466184761848618496185061851618526185361854618556185661857618586185961860618616186261863618646186561866618676186861869618706187161872618736187461875618766187761878618796188061881618826188361884618856188661887618886188961890618916189261893618946189561896618976189861899619006190161902619036190461905619066190761908619096191061911619126191361914619156191661917619186191961920619216192261923619246192561926619276192861929619306193161932619336193461935619366193761938619396194061941619426194361944619456
194661947619486194961950619516195261953619546195561956619576195861959619606196161962619636196461965619666196761968619696197061971619726197361974619756197661977619786197961980619816198261983619846198561986619876198861989619906199161992619936199461995619966199761998619996200062001620026200362004620056200662007620086200962010620116201262013620146201562016620176201862019620206202162022620236202462025620266202762028620296203062031620326203362034620356203662037620386203962040620416204262043620446204562046620476204862049620506205162052620536205462055620566205762058620596206062061620626206362064620656206662067620686206962070620716207262073620746207562076620776207862079620806208162082620836208462085620866208762088620896209062091620926209362094620956209662097620986209962100621016210262103621046210562106621076210862109621106211162112621136211462115621166211762118621196212062121621226212362124621256212662127621286212962130621316213262133621346213562136621376213862139621406214162142621436214462145621466214762148621496215062151621526215362154621556215662157621586215962160621616216262163621646216562166621676216862169621706217162172621736217462175621766217762178621796218062181621826218362184621856218662187621886218962190621916219262193621946219562196621976219862199622006220162202622036220462205622066220762208622096221062211622126221362214622156221662217622186221962220622216222262223622246222562226622276222862229622306223162232622336223462235622366223762238622396224062241622426224362244622456224662247622486224962250622516225262253622546225562256622576225862259622606226162262622636226462265622666226762268622696227062271622726227362274622756227662277622786227962280622816228262283622846228562286622876228862289622906229162292622936229462295622966229762298622996230062301623026230362304623056230662307623086230962310623116231262313623146231562316623176231862319623206232162322623236232462325623266232762328623296233062331623326233362334623356233662337623386233962340623416234262343623446234562346623476234862349623506235162352623536235462355623566235762358623596236062361623626236362364623656236662367623686236962370623716237262373623746237562376623776237862379623806238162382623836238462385623866238762388623896239062391623926239362394623956239662397623986239962400624016240262403624046240562406624076240862409624106241162412624136241462415624166241762418624196242062421624226242362424624256242662427624286242962430624316243262433624346243562436624376243862439624406244162442624436244462445624466244762448624496245062451624526245362454624556245662457624586245962460624616246262463624646246562466624676246862469624706247162472624736247462475624766247762478624796248062481624826248362484624856248662487624886248962490624916249262493624946249562496624976249862499625006250162502625036250462505625066250762508625096251062511625126251362514625156251662517625186251962520625216252262523625246252562526625276252862529625306253162532625336253462535625366253762538625396254062541625426254362544625456254662547625486254962550625516255262553625546255562556625576255862559625606256162562625636256462565625666256762568625696257062571625726257362574625756257662577625786257962580625816258262583625846258562586625876258862589625906259162592625936259462595625966259762598625996260062601626026260362604626056260662607626086260962610626116261262613626146261562616626176261862619626206262162622626236262462625626266262762628626296263062631626326263362634626356263662637626386263962640626416264262643626446264562646626476264862649626506265162652626536265462655626566
265762658626596266062661626626266362664626656266662667626686266962670626716267262673626746267562676626776267862679626806268162682626836268462685626866268762688626896269062691626926269362694626956269662697626986269962700627016270262703627046270562706627076270862709627106271162712627136271462715627166271762718627196272062721627226272362724627256272662727627286272962730627316273262733627346273562736627376273862739627406274162742627436274462745627466274762748627496275062751627526275362754627556275662757627586275962760627616276262763627646276562766627676276862769627706277162772627736277462775627766277762778627796278062781627826278362784627856278662787627886278962790627916279262793627946279562796627976279862799628006280162802628036280462805628066280762808628096281062811628126281362814628156281662817628186281962820628216282262823628246282562826628276282862829628306283162832628336283462835628366283762838628396284062841628426284362844628456284662847628486284962850628516285262853628546285562856628576285862859628606286162862628636286462865628666286762868628696287062871628726287362874628756287662877628786287962880628816288262883628846288562886628876288862889628906289162892628936289462895628966289762898628996290062901629026290362904629056290662907629086290962910629116291262913629146291562916629176291862919629206292162922629236292462925629266292762928629296293062931629326293362934629356293662937629386293962940629416294262943629446294562946629476294862949629506295162952629536295462955629566295762958629596296062961629626296362964629656296662967629686296962970629716297262973629746297562976629776297862979629806298162982629836298462985629866298762988629896299062991629926299362994629956299662997629986299963000630016300263003630046300563006630076300863009630106301163012630136301463015630166301763018630196302063021630226302363024630256302663027630286302963030630316303263033630346303563036630376303863039630406304163042630436304463045630466304763048630496305063051630526305363054630556305663057630586305963060630616306263063630646306563066630676306863069630706307163072630736307463075630766307763078630796308063081630826308363084630856308663087630886308963090630916309263093630946309563096630976309863099631006310163102631036310463105631066310763108631096311063111631126311363114631156311663117631186311963120631216312263123631246312563126631276312863129631306313163132631336313463135631366313763138631396314063141631426314363144631456314663147631486314963150631516315263153631546315563156631576315863159631606316163162631636316463165631666316763168631696317063171631726317363174631756317663177631786317963180631816318263183631846318563186631876318863189631906319163192631936319463195631966319763198631996320063201632026320363204632056320663207632086320963210632116321263213632146321563216632176321863219632206322163222632236322463225632266322763228632296323063231632326323363234632356323663237632386323963240632416324263243632446324563246632476324863249632506325163252632536325463255632566325763258632596326063261632626326363264632656326663267632686326963270632716327263273632746327563276632776327863279632806328163282632836328463285632866328763288632896329063291632926329363294632956329663297632986329963300633016330263303633046330563306633076330863309633106331163312633136331463315633166331763318633196332063321633226332363324633256332663327633286332963330633316333263333633346333563336633376333863339633406334163342633436334463345633466334763348633496335063351633526335363354633556335663357633586335963360633616336263363633646336563366633676
336863369633706337163372633736337463375633766337763378633796338063381633826338363384633856338663387633886338963390633916339263393633946339563396633976339863399634006340163402634036340463405634066340763408634096341063411634126341363414634156341663417634186341963420634216342263423634246342563426634276342863429634306343163432634336343463435634366343763438634396344063441634426344363444634456344663447634486344963450634516345263453634546345563456634576345863459634606346163462634636346463465634666346763468634696347063471634726347363474634756347663477634786347963480634816348263483634846348563486634876348863489634906349163492634936349463495634966349763498634996350063501635026350363504635056350663507635086350963510635116351263513635146351563516635176351863519635206352163522635236352463525635266352763528635296353063531635326353363534635356353663537635386353963540635416354263543635446354563546635476354863549635506355163552635536355463555635566355763558635596356063561635626356363564635656356663567635686356963570635716357263573635746357563576635776357863579635806358163582635836358463585635866358763588635896359063591635926359363594635956359663597635986359963600636016360263603636046360563606636076360863609636106361163612636136361463615636166361763618636196362063621636226362363624636256362663627636286362963630636316363263633636346363563636636376363863639636406364163642636436364463645636466364763648636496365063651636526365363654636556365663657636586365963660636616366263663636646366563666636676366863669636706367163672636736367463675636766367763678636796368063681636826368363684636856368663687636886368963690636916369263693636946369563696636976369863699637006370163702637036370463705637066370763708637096371063711637126371363714637156371663717637186371963720637216372263723637246372563726637276372863729637306373163732637336373463735637366373763738637396374063741637426374363744637456374663747637486374963750637516375263753637546375563756637576375863759637606376163762637636376463765637666376763768637696377063771637726377363774637756377663777637786377963780637816378263783637846378563786637876378863789637906379163792637936379463795637966379763798637996380063801638026380363804638056380663807638086380963810638116381263813638146381563816638176381863819638206382163822638236382463825638266382763828638296383063831638326383363834638356383663837638386383963840638416384263843638446384563846638476384863849638506385163852638536385463855638566385763858638596386063861638626386363864638656386663867638686386963870638716387263873638746387563876638776387863879638806388163882638836388463885638866388763888638896389063891638926389363894638956389663897638986389963900639016390263903639046390563906639076390863909639106391163912639136391463915639166391763918639196392063921639226392363924639256392663927639286392963930639316393263933639346393563936639376393863939639406394163942639436394463945639466394763948639496395063951639526395363954639556395663957639586395963960639616396263963639646396563966639676396863969639706397163972639736397463975639766397763978639796398063981639826398363984639856398663987639886398963990639916399263993639946399563996639976399863999640006400164002640036400464005640066400764008640096401064011640126401364014640156401664017640186401964020640216402264023640246402564026640276402864029640306403164032640336403464035640366403764038640396404064041640426404364044640456404664047640486404964050640516405264053640546405564056640576405864059640606406164062640636406464065640666406764068640696407064071640726407364074640756407664077640786
407964080640816408264083640846408564086640876408864089640906409164092640936409464095640966409764098640996410064101641026410364104641056410664107641086410964110641116411264113641146411564116641176411864119641206412164122641236412464125641266412764128641296413064131641326413364134641356413664137641386413964140641416414264143641446414564146641476414864149641506415164152641536415464155641566415764158641596416064161641626416364164641656416664167641686416964170641716417264173641746417564176641776417864179641806418164182641836418464185641866418764188641896419064191641926419364194641956419664197641986419964200642016420264203642046420564206642076420864209642106421164212642136421464215642166421764218642196422064221642226422364224642256422664227642286422964230642316423264233642346423564236642376423864239642406424164242642436424464245642466424764248642496425064251642526425364254642556425664257642586425964260642616426264263642646426564266642676426864269642706427164272642736427464275642766427764278642796428064281642826428364284642856428664287642886428964290642916429264293642946429564296642976429864299643006430164302643036430464305643066430764308643096431064311643126431364314643156431664317643186431964320643216432264323643246432564326643276432864329643306433164332643336433464335643366433764338643396434064341643426434364344643456434664347643486434964350643516435264353643546435564356643576435864359643606436164362643636436464365643666436764368643696437064371643726437364374643756437664377643786437964380643816438264383643846438564386643876438864389643906439164392643936439464395643966439764398643996440064401644026440364404644056440664407644086440964410644116441264413644146441564416644176441864419644206442164422644236442464425644266442764428644296443064431644326443364434644356443664437644386443964440644416444264443644446444564446644476444864449644506445164452644536445464455644566445764458644596446064461644626446364464644656446664467644686446964470644716447264473644746447564476644776447864479644806448164482644836448464485644866448764488644896449064491644926449364494644956449664497644986449964500645016450264503645046450564506645076450864509645106451164512645136451464515645166451764518645196452064521645226452364524645256452664527645286452964530645316453264533645346453564536645376453864539645406454164542645436454464545645466454764548645496455064551645526455364554645556455664557645586455964560645616456264563645646456564566645676456864569645706457164572645736457464575645766457764578645796458064581645826458364584645856458664587645886458964590645916459264593645946459564596645976459864599646006460164602646036460464605646066460764608646096461064611646126461364614646156461664617646186461964620646216462264623646246462564626646276462864629646306463164632646336463464635646366463764638646396464064641646426464364644646456464664647646486464964650646516465264653646546465564656646576465864659646606466164662646636466464665646666466764668646696467064671646726467364674646756467664677646786467964680646816468264683646846468564686646876468864689646906469164692646936469464695646966469764698646996470064701647026470364704647056470664707647086470964710647116471264713647146471564716647176471864719647206472164722647236472464725647266472764728647296473064731647326473364734647356473664737647386473964740647416474264743647446474564746647476474864749647506475164752647536475464755647566475764758647596476064761647626476364764647656476664767647686476964770647716477264773647746477564776647776477864779647806478164782647836478464785647866478764788647896
479064791647926479364794647956479664797647986479964800648016480264803648046480564806648076480864809648106481164812648136481464815648166481764818648196482064821648226482364824648256482664827648286482964830648316483264833648346483564836648376483864839648406484164842648436484464845648466484764848648496485064851648526485364854648556485664857648586485964860648616486264863648646486564866648676486864869648706487164872648736487464875648766487764878648796488064881648826488364884648856488664887648886488964890648916489264893648946489564896648976489864899649006490164902649036490464905649066490764908649096491064911649126491364914649156491664917649186491964920649216492264923649246492564926649276492864929649306493164932649336493464935649366493764938649396494064941649426494364944649456494664947649486494964950649516495264953649546495564956649576495864959649606496164962649636496464965649666496764968649696497064971649726497364974649756497664977649786497964980649816498264983649846498564986649876498864989649906499164992649936499464995649966499764998649996500065001650026500365004650056500665007650086500965010650116501265013650146501565016650176501865019650206502165022650236502465025650266502765028650296503065031650326503365034650356503665037650386503965040650416504265043650446504565046650476504865049650506505165052650536505465055650566505765058650596506065061650626506365064650656506665067650686506965070650716507265073650746507565076650776507865079650806508165082650836508465085650866508765088650896509065091650926509365094650956509665097650986509965100651016510265103651046510565106651076510865109651106511165112651136511465115651166511765118651196512065121651226512365124651256512665127651286512965130651316513265133651346513565136651376513865139651406514165142651436514465145651466514765148651496515065151651526515365154651556515665157651586515965160651616516265163651646516565166651676516865169651706517165172651736517465175651766517765178651796518065181651826518365184651856518665187651886518965190651916519265193651946519565196651976519865199652006520165202652036520465205652066520765208652096521065211652126521365214652156521665217652186521965220652216522265223652246522565226652276522865229652306523165232652336523465235652366523765238652396524065241652426524365244652456524665247652486524965250652516525265253652546525565256652576525865259652606526165262652636526465265652666526765268652696527065271652726527365274652756527665277652786527965280652816528265283652846528565286652876528865289652906529165292652936529465295652966529765298652996530065301653026530365304653056530665307653086530965310653116531265313653146531565316653176531865319653206532165322653236532465325653266532765328653296533065331653326533365334653356533665337653386533965340653416534265343653446534565346653476534865349653506535165352653536535465355653566535765358653596536065361653626536365364653656536665367653686536965370653716537265373653746537565376653776537865379653806538165382653836538465385653866538765388653896539065391653926539365394653956539665397653986539965400654016540265403654046540565406654076540865409654106541165412654136541465415654166541765418654196542065421654226542365424654256542665427654286542965430654316543265433654346543565436654376543865439654406544165442654436544465445654466544765448654496545065451654526545365454654556545665457654586545965460654616546265463654646546565466654676546865469654706547165472654736547465475654766547765478654796548065481654826548365484654856548665487654886548965490654916549265493654946549565496654976549865499655006
550165502655036550465505655066550765508655096551065511655126551365514655156551665517655186551965520655216552265523655246552565526655276552865529655306553165532655336553465535655366553765538655396554065541655426554365544655456554665547655486554965550655516555265553655546555565556655576555865559655606556165562655636556465565655666556765568655696557065571655726557365574655756557665577655786557965580655816558265583655846558565586655876558865589655906559165592655936559465595655966559765598655996560065601656026560365604656056560665607656086560965610656116561265613656146561565616656176561865619656206562165622656236562465625656266562765628656296563065631656326563365634656356563665637656386563965640656416564265643656446564565646656476564865649656506565165652656536565465655656566565765658656596566065661656626566365664656656566665667656686566965670656716567265673656746567565676656776567865679656806568165682656836568465685656866568765688656896569065691656926569365694656956569665697656986569965700657016570265703657046570565706657076570865709657106571165712657136571465715657166571765718657196572065721657226572365724657256572665727657286572965730657316573265733657346573565736657376573865739657406574165742657436574465745657466574765748657496575065751657526575365754657556575665757657586575965760657616576265763657646576565766657676576865769657706577165772657736577465775657766577765778657796578065781657826578365784657856578665787657886578965790657916579265793657946579565796657976579865799658006580165802658036580465805658066580765808658096581065811658126581365814658156581665817658186581965820658216582265823658246582565826658276582865829658306583165832658336583465835658366583765838658396584065841658426584365844658456584665847658486584965850658516585265853658546585565856658576585865859658606586165862658636586465865658666586765868658696587065871658726587365874658756587665877658786587965880658816588265883658846588565886658876588865889658906589165892658936589465895658966589765898658996590065901659026590365904659056590665907659086590965910659116591265913659146591565916659176591865919659206592165922659236592465925659266592765928659296593065931659326593365934659356593665937659386593965940659416594265943659446594565946659476594865949659506595165952659536595465955659566595765958659596596065961659626596365964659656596665967659686596965970659716597265973659746597565976659776597865979659806598165982659836598465985659866598765988659896599065991659926599365994659956599665997659986599966000660016600266003660046600566006660076600866009660106601166012660136601466015660166601766018660196602066021660226602366024660256602666027660286602966030660316603266033660346603566036660376603866039660406604166042660436604466045660466604766048660496605066051660526605366054660556605666057660586605966060660616606266063660646606566066660676606866069660706607166072660736607466075660766607766078660796608066081660826608366084660856608666087660886608966090660916609266093660946609566096660976609866099661006610166102661036610466105661066610766108661096611066111661126611366114661156611666117661186611966120661216612266123661246612566126661276612866129661306613166132661336613466135661366613766138661396614066141661426614366144661456614666147661486614966150661516615266153661546615566156661576615866159661606616166162661636616466165661666616766168661696617066171661726617366174661756617666177661786617966180661816618266183661846618566186661876618866189661906619166192661936619466195661966619766198661996620066201662026620366204662056620666207662086620966210662116
621266213662146621566216662176621866219662206622166222662236622466225662266622766228662296623066231662326623366234662356623666237662386623966240662416624266243662446624566246662476624866249662506625166252662536625466255662566625766258662596626066261662626626366264662656626666267662686626966270662716627266273662746627566276662776627866279662806628166282662836628466285662866628766288662896629066291662926629366294662956629666297662986629966300663016630266303663046630566306663076630866309663106631166312663136631466315663166631766318663196632066321663226632366324663256632666327663286632966330663316633266333663346633566336663376633866339663406634166342663436634466345663466634766348663496635066351663526635366354663556635666357663586635966360663616636266363663646636566366663676636866369663706637166372663736637466375663766637766378663796638066381663826638366384663856638666387663886638966390663916639266393663946639566396663976639866399664006640166402664036640466405664066640766408664096641066411664126641366414664156641666417664186641966420664216642266423664246642566426664276642866429664306643166432664336643466435664366643766438664396644066441664426644366444664456644666447664486644966450664516645266453664546645566456664576645866459664606646166462664636646466465664666646766468664696647066471664726647366474664756647666477664786647966480664816648266483664846648566486664876648866489664906649166492664936649466495664966649766498664996650066501665026650366504665056650666507665086650966510665116651266513665146651566516665176651866519665206652166522665236652466525665266652766528665296653066531665326653366534665356653666537665386653966540665416654266543665446654566546665476654866549665506655166552665536655466555665566655766558665596656066561665626656366564665656656666567665686656966570665716657266573665746657566576665776657866579665806658166582665836658466585665866658766588665896659066591665926659366594665956659666597665986659966600666016660266603666046660566606666076660866609666106661166612666136661466615666166661766618666196662066621666226662366624666256662666627666286662966630666316663266633666346663566636666376663866639666406664166642666436664466645666466664766648666496665066651666526665366654666556665666657666586665966660666616666266663666646666566666666676666866669666706667166672666736667466675666766667766678666796668066681666826668366684666856668666687666886668966690666916669266693666946669566696666976669866699667006670166702667036670466705667066670766708667096671066711667126671366714667156671666717667186671966720667216672266723667246672566726667276672866729667306673166732667336673466735667366673766738667396674066741667426674366744667456674666747667486674966750667516675266753667546675566756667576675866759667606676166762667636676466765667666676766768667696677066771667726677366774667756677666777667786677966780667816678266783667846678566786667876678866789667906679166792667936679466795667966679766798667996680066801668026680366804668056680666807668086680966810668116681266813668146681566816668176681866819668206682166822668236682466825668266682766828668296683066831668326683366834668356683666837668386683966840668416684266843668446684566846668476684866849668506685166852668536685466855668566685766858668596686066861668626686366864668656686666867668686686966870668716687266873668746687566876668776687866879668806688166882668836688466885668866688766888668896689066891668926689366894668956689666897668986689966900669016690266903669046690566906669076690866909669106691166912669136691466915669166691766918669196692066921669226
692366924669256692666927669286692966930669316693266933669346693566936669376693866939669406694166942669436694466945669466694766948669496695066951669526695366954669556695666957669586695966960669616696266963669646696566966669676696866969669706697166972669736697466975669766697766978669796698066981669826698366984669856698666987669886698966990669916699266993669946699566996669976699866999670006700167002670036700467005670066700767008670096701067011670126701367014670156701667017670186701967020670216702267023670246702567026670276702867029670306703167032670336703467035670366703767038670396704067041670426704367044670456704667047670486704967050670516705267053670546705567056670576705867059670606706167062670636706467065670666706767068670696707067071670726707367074670756707667077670786707967080670816708267083670846708567086670876708867089670906709167092670936709467095670966709767098670996710067101671026710367104671056710667107671086710967110671116711267113671146711567116671176711867119671206712167122671236712467125671266712767128671296713067131671326713367134671356713667137671386713967140671416714267143671446714567146671476714867149671506715167152671536715467155671566715767158671596716067161671626716367164671656716667167671686716967170671716717267173671746717567176671776717867179671806718167182671836718467185671866718767188671896719067191671926719367194671956719667197671986719967200672016720267203672046720567206672076720867209672106721167212672136721467215672166721767218672196722067221672226722367224672256722667227672286722967230672316723267233672346723567236672376723867239672406724167242672436724467245672466724767248672496725067251672526725367254672556725667257672586725967260672616726267263672646726567266672676726867269672706727167272672736727467275672766727767278672796728067281672826728367284672856728667287672886728967290672916729267293672946729567296672976729867299673006730167302673036730467305673066730767308673096731067311673126731367314673156731667317673186731967320673216732267323673246732567326673276732867329673306733167332673336733467335673366733767338673396734067341673426734367344673456734667347673486734967350673516735267353673546735567356673576735867359673606736167362673636736467365673666736767368673696737067371673726737367374673756737667377673786737967380673816738267383673846738567386673876738867389673906739167392673936739467395673966739767398673996740067401674026740367404674056740667407674086740967410674116741267413674146741567416674176741867419674206742167422674236742467425674266742767428674296743067431674326743367434674356743667437674386743967440674416744267443674446744567446674476744867449674506745167452674536745467455674566745767458674596746067461674626746367464674656746667467674686746967470674716747267473674746747567476674776747867479674806748167482674836748467485674866748767488674896749067491674926749367494674956749667497674986749967500675016750267503675046750567506675076750867509675106751167512675136751467515675166751767518675196752067521675226752367524675256752667527675286752967530675316753267533675346753567536675376753867539675406754167542675436754467545675466754767548675496755067551675526755367554675556755667557675586755967560675616756267563675646756567566675676756867569675706757167572675736757467575675766757767578675796758067581675826758367584675856758667587675886758967590675916759267593675946759567596675976759867599676006760167602676036760467605676066760767608676096761067611676126761367614676156761667617676186761967620676216762267623676246762567626676276762867629676306763167632676336
763467635676366763767638676396764067641676426764367644676456764667647676486764967650676516765267653676546765567656676576765867659676606766167662676636766467665676666766767668676696767067671676726767367674676756767667677676786767967680676816768267683676846768567686676876768867689676906769167692676936769467695676966769767698676996770067701677026770367704677056770667707677086770967710677116771267713677146771567716677176771867719677206772167722677236772467725677266772767728677296773067731677326773367734677356773667737677386773967740677416774267743677446774567746677476774867749677506775167752677536775467755677566775767758677596776067761677626776367764677656776667767677686776967770677716777267773677746777567776677776777867779677806778167782677836778467785677866778767788677896779067791677926779367794677956779667797677986779967800678016780267803678046780567806678076780867809678106781167812678136781467815678166781767818678196782067821678226782367824678256782667827678286782967830678316783267833678346783567836678376783867839678406784167842678436784467845678466784767848678496785067851678526785367854678556785667857678586785967860678616786267863678646786567866678676786867869678706787167872678736787467875678766787767878678796788067881678826788367884678856788667887678886788967890678916789267893678946789567896678976789867899679006790167902679036790467905679066790767908679096791067911679126791367914679156791667917679186791967920679216792267923679246792567926679276792867929679306793167932679336793467935679366793767938679396794067941679426794367944679456794667947679486794967950679516795267953679546795567956679576795867959679606796167962679636796467965679666796767968679696797067971679726797367974679756797667977679786797967980679816798267983679846798567986679876798867989679906799167992679936799467995679966799767998679996800068001680026800368004680056800668007680086800968010680116801268013680146801568016680176801868019680206802168022680236802468025680266802768028680296803068031680326803368034680356803668037680386803968040680416804268043680446804568046680476804868049680506805168052680536805468055680566805768058680596806068061680626806368064680656806668067680686806968070680716807268073680746807568076680776807868079680806808168082680836808468085680866808768088680896809068091680926809368094680956809668097680986809968100681016810268103681046810568106681076810868109681106811168112681136811468115681166811768118681196812068121681226812368124681256812668127681286812968130681316813268133681346813568136681376813868139681406814168142681436814468145681466814768148681496815068151681526815368154681556815668157681586815968160681616816268163681646816568166681676816868169681706817168172681736817468175681766817768178681796818068181681826818368184681856818668187681886818968190681916819268193681946819568196681976819868199682006820168202682036820468205682066820768208682096821068211682126821368214682156821668217682186821968220682216822268223682246822568226682276822868229682306823168232682336823468235682366823768238682396824068241682426824368244682456824668247682486824968250682516825268253682546825568256682576825868259682606826168262682636826468265682666826768268682696827068271682726827368274682756827668277682786827968280682816828268283682846828568286682876828868289682906829168292682936829468295682966829768298682996830068301683026830368304683056830668307683086830968310683116831268313683146831568316683176831868319683206832168322683236832468325683266832768328683296833068331683326833368334683356833668337683386833968340683416834268343683446
834568346683476834868349683506835168352683536835468355683566835768358683596836068361683626836368364683656836668367683686836968370683716837268373683746837568376683776837868379683806838168382683836838468385683866838768388683896839068391683926839368394683956839668397683986839968400684016840268403684046840568406684076840868409684106841168412684136841468415684166841768418684196842068421684226842368424684256842668427684286842968430684316843268433684346843568436684376843868439684406844168442684436844468445684466844768448684496845068451684526845368454684556845668457684586845968460684616846268463684646846568466684676846868469684706847168472684736847468475684766847768478684796848068481684826848368484684856848668487684886848968490684916849268493684946849568496684976849868499685006850168502685036850468505685066850768508685096851068511685126851368514685156851668517685186851968520685216852268523685246852568526685276852868529685306853168532685336853468535685366853768538685396854068541685426854368544685456854668547685486854968550685516855268553685546855568556685576855868559685606856168562685636856468565685666856768568685696857068571685726857368574685756857668577685786857968580685816858268583685846858568586685876858868589685906859168592685936859468595685966859768598685996860068601686026860368604686056860668607686086860968610686116861268613686146861568616686176861868619686206862168622686236862468625686266862768628686296863068631686326863368634686356863668637686386863968640686416864268643686446864568646686476864868649686506865168652686536865468655686566865768658686596866068661686626866368664686656866668667686686866968670686716867268673686746867568676686776867868679686806868168682686836868468685686866868768688686896869068691686926869368694686956869668697686986869968700687016870268703687046870568706687076870868709687106871168712687136871468715687166871768718687196872068721687226872368724687256872668727687286872968730687316873268733687346873568736687376873868739687406874168742687436874468745687466874768748687496875068751687526875368754687556875668757687586875968760687616876268763687646876568766687676876868769687706877168772687736877468775687766877768778687796878068781687826878368784687856878668787687886878968790687916879268793687946879568796687976879868799688006880168802688036880468805688066880768808688096881068811688126881368814688156881668817688186881968820688216882268823688246882568826688276882868829688306883168832688336883468835688366883768838688396884068841688426884368844688456884668847688486884968850688516885268853688546885568856688576885868859688606886168862688636886468865688666886768868688696887068871688726887368874688756887668877688786887968880688816888268883688846888568886688876888868889688906889168892688936889468895688966889768898688996890068901689026890368904689056890668907689086890968910689116891268913689146891568916689176891868919689206892168922689236892468925689266892768928689296893068931689326893368934689356893668937689386893968940689416894268943689446894568946689476894868949689506895168952689536895468955689566895768958689596896068961689626896368964689656896668967689686896968970689716897268973689746897568976689776897868979689806898168982689836898468985689866898768988689896899068991689926899368994689956899668997689986899969000690016900269003690046900569006690076900869009690106901169012690136901469015690166901769018690196902069021690226902369024690256902669027690286902969030690316903269033690346903569036690376903869039690406904169042690436904469045690466904769048690496905069051690526905369054690556
905669057690586905969060690616906269063690646906569066690676906869069690706907169072690736907469075690766907769078690796908069081690826908369084690856908669087690886908969090690916909269093690946909569096690976909869099691006910169102691036910469105691066910769108691096911069111691126911369114691156911669117691186911969120691216912269123691246912569126691276912869129691306913169132691336913469135691366913769138691396914069141691426914369144691456914669147691486914969150691516915269153691546915569156691576915869159691606916169162691636916469165691666916769168691696917069171691726917369174691756917669177691786917969180691816918269183691846918569186691876918869189691906919169192691936919469195691966919769198691996920069201692026920369204692056920669207692086920969210692116921269213692146921569216692176921869219692206922169222692236922469225692266922769228692296923069231692326923369234692356923669237692386923969240692416924269243692446924569246692476924869249692506925169252692536925469255692566925769258692596926069261692626926369264692656926669267692686926969270692716927269273692746927569276692776927869279692806928169282692836928469285692866928769288692896929069291692926929369294692956929669297692986929969300693016930269303693046930569306693076930869309693106931169312693136931469315693166931769318693196932069321693226932369324693256932669327693286932969330693316933269333693346933569336693376933869339693406934169342693436934469345693466934769348693496935069351693526935369354693556935669357693586935969360693616936269363693646936569366693676936869369693706937169372693736937469375693766937769378693796938069381693826938369384693856938669387693886938969390693916939269393693946939569396693976939869399694006940169402694036940469405694066940769408694096941069411694126941369414694156941669417694186941969420694216942269423694246942569426694276942869429694306943169432694336943469435694366943769438694396944069441694426944369444694456944669447694486944969450694516945269453694546945569456694576945869459694606946169462694636946469465694666946769468694696947069471694726947369474694756947669477694786947969480694816948269483694846948569486694876948869489694906949169492694936949469495694966949769498694996950069501695026950369504695056950669507695086950969510695116951269513695146951569516695176951869519695206952169522695236952469525695266952769528695296953069531695326953369534695356953669537695386953969540695416954269543695446954569546695476954869549695506955169552695536955469555695566955769558695596956069561695626956369564695656956669567695686956969570695716957269573695746957569576695776957869579695806958169582695836958469585695866958769588695896959069591695926959369594695956959669597695986959969600696016960269603696046960569606696076960869609696106961169612696136961469615696166961769618696196962069621696226962369624696256962669627696286962969630696316963269633696346963569636696376963869639696406964169642696436964469645696466964769648696496965069651696526965369654696556965669657696586965969660696616966269663696646966569666696676966869669696706967169672696736967469675696766967769678696796968069681696826968369684696856968669687696886968969690696916969269693696946969569696696976969869699697006970169702697036970469705697066970769708697096971069711697126971369714697156971669717697186971969720697216972269723697246972569726697276972869729697306973169732697336973469735697366973769738697396974069741697426974369744697456974669747697486974969750697516975269753697546975569756697576975869759697606976169762697636976469765697666
256582566825678256882569825708257182572825738257482575825768257782578825798258082581825828258382584825858258682587825888258982590825918259282593825948259582596825978259882599826008260182602826038260482605826068260782608826098261082611826128261382614826158261682617826188261982620826218262282623826248262582626826278262882629826308263182632826338263482635826368263782638826398264082641826428264382644826458264682647826488264982650826518265282653826548265582656826578265882659826608266182662826638266482665826668266782668826698267082671826728267382674826758267682677826788267982680826818268282683826848268582686826878268882689826908269182692826938269482695826968269782698826998270082701827028270382704827058270682707827088270982710827118271282713827148271582716827178271882719827208272182722827238272482725827268272782728827298273082731827328273382734827358273682737827388273982740827418274282743827448274582746827478274882749827508275182752827538275482755827568275782758827598276082761827628276382764827658276682767827688276982770827718277282773827748277582776827778277882779827808278182782827838278482785827868278782788827898279082791827928279382794827958279682797827988279982800828018280282803828048280582806828078280882809828108281182812828138281482815828168281782818828198282082821828228282382824828258282682827828288282982830828318283282833828348283582836828378283882839828408284182842828438284482845828468284782848828498285082851828528285382854828558285682857828588285982860828618286282863828648286582866828678286882869828708287182872828738287482875828768287782878828798288082881828828288382884828858288682887828888288982890828918289282893828948289582896828978289882899829008290182902829038290482905829068290782908829098291082911829128291382914829158291682917829188291982920829218292282923829248292582926829278292882929829308293182932829338293482935829368293782938829398294082941829428294382944829458294682947829488294982950829518295282953829548295582956829578295882959829608296182962829638296482965829668296782968829698297082971829728297382974829758297682977829788297982980829818298282983829848298582986829878298882989829908299182992829938299482995829968299782998829998300083001830028300383004830058300683007830088300983010830118301283013830148301583016830178301883019830208302183022830238302483025830268302783028830298303083031830328303383034830358303683037830388303983040830418304283043830448304583046830478304883049830508305183052830538305483055830568305783058830598306083061830628306383064830658306683067830688306983070830718307283073830748307583076830778307883079830808308183082830838308483085830868308783088830898309083091830928309383094830958309683097830988309983100831018310283103831048310583106831078310883109831108311183112831138311483115831168311783118831198312083121831228312383124831258312683127831288312983130831318313283133831348313583136831378313883139831408314183142831438314483145831468314783148831498315083151831528315383154831558315683157831588315983160831618316283163831648316583166831678316883169831708317183172831738317483175831768317783178831798318083181831828318383184831858318683187831888318983190831918319283193831948319583196831978319883199832008320183202832038320483205832068320783208832098321083211832128321383214832158321683217832188321983220832218322283223832248322583226832278322883229832308323183232832338323483235832368323783238832398324083241832428324383244832458324683247832488324983250832518325283253832548325583256832578325883259832608326183262832638326483265832668326783268832698327083271832728327383274832758
327683277832788327983280832818328283283832848328583286832878328883289832908329183292832938329483295832968329783298832998330083301833028330383304833058330683307833088330983310833118331283313833148331583316833178331883319833208332183322833238332483325833268332783328833298333083331833328333383334833358333683337833388333983340833418334283343833448334583346833478334883349833508335183352833538335483355833568335783358833598336083361833628336383364833658336683367833688336983370833718337283373833748337583376833778337883379833808338183382833838338483385833868338783388833898339083391833928339383394833958339683397833988339983400834018340283403834048340583406834078340883409834108341183412834138341483415834168341783418834198342083421834228342383424834258342683427834288342983430834318343283433834348343583436834378343883439834408344183442834438344483445834468344783448834498345083451834528345383454834558345683457834588345983460834618346283463834648346583466834678346883469834708347183472834738347483475834768347783478834798348083481834828348383484834858348683487834888348983490834918349283493834948349583496834978349883499835008350183502835038350483505835068350783508835098351083511835128351383514835158351683517835188351983520835218352283523835248352583526835278352883529835308353183532835338353483535835368353783538835398354083541835428354383544835458354683547835488354983550835518355283553835548355583556835578355883559835608356183562835638356483565835668356783568835698357083571835728357383574835758357683577835788357983580835818358283583835848358583586835878358883589835908359183592835938359483595835968359783598835998360083601836028360383604836058360683607836088360983610836118361283613836148361583616836178361883619836208362183622836238362483625836268362783628836298363083631836328363383634836358363683637836388363983640836418364283643836448364583646836478364883649836508365183652836538365483655836568365783658836598366083661836628366383664836658366683667836688366983670836718367283673836748367583676836778367883679836808368183682836838368483685836868368783688836898369083691836928369383694836958369683697836988369983700837018370283703837048370583706837078370883709837108371183712837138371483715837168371783718837198372083721837228372383724837258372683727837288372983730837318373283733837348373583736837378373883739837408374183742837438374483745837468374783748837498375083751837528375383754837558375683757837588375983760837618376283763837648376583766837678376883769837708377183772837738377483775837768377783778837798378083781837828378383784837858378683787837888378983790837918379283793837948379583796837978379883799838008380183802838038380483805838068380783808838098381083811838128381383814838158381683817838188381983820838218382283823838248382583826838278382883829838308383183832838338383483835838368383783838838398384083841838428384383844838458384683847838488384983850838518385283853838548385583856838578385883859838608386183862838638386483865838668386783868838698387083871838728387383874838758387683877838788387983880838818388283883838848388583886838878388883889838908389183892838938389483895838968389783898838998390083901839028390383904839058390683907839088390983910839118391283913839148391583916839178391883919839208392183922839238392483925839268392783928839298393083931839328393383934839358393683937839388393983940839418394283943839448394583946839478394883949839508395183952839538395483955839568395783958839598396083961839628396383964839658396683967839688396983970839718397283973839748397583976839778397883979839808398183982839838398483985839868
398783988839898399083991839928399383994839958399683997839988399984000840018400284003840048400584006840078400884009840108401184012840138401484015840168401784018840198402084021840228402384024840258402684027840288402984030840318403284033840348403584036840378403884039840408404184042840438404484045840468404784048840498405084051840528405384054840558405684057840588405984060840618406284063840648406584066840678406884069840708407184072840738407484075840768407784078840798408084081840828408384084840858408684087840888408984090840918409284093840948409584096840978409884099841008410184102841038410484105841068410784108841098411084111841128411384114841158411684117841188411984120841218412284123841248412584126841278412884129841308413184132841338413484135841368413784138841398414084141841428414384144841458414684147841488414984150841518415284153841548415584156841578415884159841608416184162841638416484165841668416784168841698417084171841728417384174841758417684177841788417984180841818418284183841848418584186841878418884189841908419184192841938419484195841968419784198841998420084201842028420384204842058420684207842088420984210842118421284213842148421584216842178421884219842208422184222842238422484225842268422784228842298423084231842328423384234842358423684237842388423984240842418424284243842448424584246842478424884249842508425184252842538425484255842568425784258842598426084261842628426384264842658426684267842688426984270842718427284273842748427584276842778427884279842808428184282842838428484285842868428784288842898429084291842928429384294842958429684297842988429984300843018430284303843048430584306843078430884309843108431184312843138431484315843168431784318843198432084321843228432384324843258432684327843288432984330843318433284333843348433584336843378433884339843408434184342843438434484345843468434784348843498435084351843528435384354843558435684357843588435984360843618436284363843648436584366843678436884369843708437184372843738437484375843768437784378843798438084381843828438384384843858438684387843888438984390843918439284393843948439584396843978439884399844008440184402844038440484405844068440784408844098441084411844128441384414844158441684417844188441984420844218442284423844248442584426844278442884429844308443184432844338443484435844368443784438844398444084441844428444384444844458444684447844488444984450844518445284453844548445584456844578445884459844608446184462844638446484465844668446784468844698447084471844728447384474844758447684477844788447984480844818448284483844848448584486844878448884489844908449184492844938449484495844968449784498844998450084501845028450384504845058450684507845088450984510845118451284513845148451584516845178451884519845208452184522845238452484525845268452784528845298453084531845328453384534845358453684537845388453984540845418454284543845448454584546845478454884549845508455184552845538455484555845568455784558845598456084561845628456384564845658456684567845688456984570845718457284573845748457584576845778457884579845808458184582845838458484585845868458784588845898459084591845928459384594845958459684597845988459984600846018460284603846048460584606846078460884609846108461184612846138461484615846168461784618846198462084621846228462384624846258462684627846288462984630846318463284633846348463584636846378463884639846408464184642846438464484645846468464784648846498465084651846528465384654846558465684657846588465984660846618466284663846648466584666846678466884669846708467184672846738467484675846768467784678846798468084681846828468384684846858468684687846888468984690846918469284693846948469584696846978
469884699847008470184702847038470484705847068470784708847098471084711847128471384714847158471684717847188471984720847218472284723847248472584726847278472884729847308473184732847338473484735847368473784738847398474084741847428474384744847458474684747847488474984750847518475284753847548475584756847578475884759847608476184762847638476484765847668476784768847698477084771847728477384774847758477684777847788477984780847818478284783847848478584786847878478884789847908479184792847938479484795847968479784798847998480084801848028480384804848058480684807848088480984810848118481284813848148481584816848178481884819848208482184822848238482484825848268482784828848298483084831848328483384834848358483684837848388483984840848418484284843848448484584846848478484884849848508485184852848538485484855848568485784858848598486084861848628486384864848658486684867848688486984870848718487284873848748487584876848778487884879848808488184882848838488484885848868488784888848898489084891848928489384894848958489684897848988489984900849018490284903849048490584906849078490884909849108491184912849138491484915849168491784918849198492084921849228492384924849258492684927849288492984930849318493284933849348493584936849378493884939849408494184942849438494484945849468494784948849498495084951849528495384954849558495684957849588495984960849618496284963849648496584966849678496884969849708497184972849738497484975849768497784978849798498084981849828498384984849858498684987849888498984990849918499284993849948499584996849978499884999850008500185002850038500485005850068500785008850098501085011850128501385014850158501685017850188501985020850218502285023850248502585026850278502885029850308503185032850338503485035850368503785038850398504085041850428504385044850458504685047850488504985050850518505285053850548505585056850578505885059850608506185062850638506485065850668506785068850698507085071850728507385074850758507685077850788507985080850818508285083850848508585086850878508885089850908509185092850938509485095850968509785098850998510085101851028510385104851058510685107851088510985110851118511285113851148511585116851178511885119851208512185122851238512485125851268512785128851298513085131851328513385134851358513685137851388513985140851418514285143851448514585146851478514885149851508515185152851538515485155851568515785158851598516085161851628516385164851658516685167851688516985170851718517285173851748517585176851778517885179851808518185182851838518485185851868518785188851898519085191851928519385194851958519685197851988519985200852018520285203852048520585206852078520885209852108521185212852138521485215852168521785218852198522085221852228522385224852258522685227852288522985230852318523285233852348523585236852378523885239852408524185242852438524485245852468524785248852498525085251852528525385254852558525685257852588525985260852618526285263852648526585266852678526885269852708527185272852738527485275852768527785278852798528085281852828528385284852858528685287852888528985290852918529285293852948529585296852978529885299853008530185302853038530485305853068530785308853098531085311853128531385314853158531685317853188531985320853218532285323853248532585326853278532885329853308533185332853338533485335853368533785338853398534085341853428534385344853458534685347853488534985350853518535285353853548535585356853578535885359853608536185362853638536485365853668536785368853698537085371853728537385374853758537685377853788537985380853818538285383853848538585386853878538885389853908539185392853938539485395853968539785398853998540085401854028540385404854058540685407854088
540985410854118541285413854148541585416854178541885419854208542185422854238542485425854268542785428854298543085431854328543385434854358543685437854388543985440854418544285443854448544585446854478544885449854508545185452854538545485455854568545785458854598546085461854628546385464854658546685467854688546985470854718547285473854748547585476854778547885479854808548185482854838548485485854868548785488854898549085491854928549385494854958549685497854988549985500855018550285503855048550585506855078550885509855108551185512855138551485515855168551785518855198552085521855228552385524855258552685527855288552985530855318553285533855348553585536855378553885539855408554185542855438554485545855468554785548855498555085551855528555385554855558555685557855588555985560855618556285563855648556585566855678556885569855708557185572855738557485575855768557785578855798558085581855828558385584855858558685587855888558985590855918559285593855948559585596855978559885599856008560185602856038560485605856068560785608856098561085611856128561385614856158561685617856188561985620856218562285623856248562585626856278562885629856308563185632856338563485635856368563785638856398564085641856428564385644856458564685647856488564985650856518565285653856548565585656856578565885659856608566185662856638566485665856668566785668856698567085671856728567385674856758567685677856788567985680856818568285683856848568585686856878568885689856908569185692856938569485695856968569785698856998570085701857028570385704857058570685707857088570985710857118571285713857148571585716857178571885719857208572185722857238572485725857268572785728857298573085731857328573385734857358573685737857388573985740857418574285743857448574585746857478574885749857508575185752857538575485755857568575785758857598576085761857628576385764857658576685767857688576985770857718577285773857748577585776857778577885779857808578185782857838578485785857868578785788857898579085791857928579385794857958579685797857988579985800858018580285803858048580585806858078580885809858108581185812858138581485815858168581785818858198582085821858228582385824858258582685827858288582985830858318583285833858348583585836858378583885839858408584185842858438584485845858468584785848858498585085851858528585385854858558585685857858588585985860858618586285863858648586585866858678586885869858708587185872858738587485875858768587785878858798588085881858828588385884858858588685887858888588985890858918589285893858948589585896858978589885899859008590185902859038590485905859068590785908859098591085911859128591385914859158591685917859188591985920859218592285923859248592585926859278592885929859308593185932859338593485935859368593785938859398594085941859428594385944859458594685947859488594985950859518595285953859548595585956859578595885959859608596185962859638596485965859668596785968859698597085971859728597385974859758597685977859788597985980859818598285983859848598585986859878598885989859908599185992859938599485995859968599785998859998600086001860028600386004860058600686007860088600986010860118601286013860148601586016860178601886019860208602186022860238602486025860268602786028860298603086031860328603386034860358603686037860388603986040860418604286043860448604586046860478604886049860508605186052860538605486055860568605786058860598606086061860628606386064860658606686067860688606986070860718607286073860748607586076860778607886079860808608186082860838608486085860868608786088860898609086091860928609386094860958609686097860988609986100861018610286103861048610586106861078610886109861108611186112861138611486115861168611786118861198
612086121861228612386124861258612686127861288612986130861318613286133861348613586136861378613886139861408614186142861438614486145861468614786148861498615086151861528615386154861558615686157861588615986160861618616286163861648616586166861678616886169861708617186172861738617486175861768617786178861798618086181861828618386184861858618686187861888618986190861918619286193861948619586196861978619886199862008620186202862038620486205862068620786208862098621086211862128621386214862158621686217862188621986220862218622286223862248622586226862278622886229862308623186232862338623486235862368623786238862398624086241862428624386244862458624686247862488624986250862518625286253862548625586256862578625886259862608626186262862638626486265862668626786268862698627086271862728627386274862758627686277862788627986280862818628286283862848628586286862878628886289862908629186292862938629486295862968629786298862998630086301863028630386304863058630686307863088630986310863118631286313863148631586316863178631886319863208632186322863238632486325863268632786328863298633086331863328633386334863358633686337863388633986340863418634286343863448634586346863478634886349863508635186352863538635486355863568635786358863598636086361863628636386364863658636686367863688636986370863718637286373863748637586376863778637886379863808638186382863838638486385863868638786388863898639086391863928639386394863958639686397863988639986400864018640286403864048640586406864078640886409864108641186412864138641486415864168641786418864198642086421864228642386424864258642686427864288642986430864318643286433864348643586436864378643886439864408644186442864438644486445864468644786448864498645086451864528645386454864558645686457864588645986460864618646286463864648646586466864678646886469864708647186472864738647486475864768647786478864798648086481864828648386484864858648686487864888648986490864918649286493864948649586496864978649886499865008650186502865038650486505865068650786508865098651086511865128651386514865158651686517865188651986520865218652286523865248652586526865278652886529865308653186532865338653486535865368653786538865398654086541865428654386544865458654686547865488654986550865518655286553865548655586556865578655886559865608656186562865638656486565865668656786568865698657086571865728657386574865758657686577865788657986580865818658286583865848658586586865878658886589865908659186592865938659486595865968659786598865998660086601866028660386604866058660686607866088660986610866118661286613866148661586616866178661886619866208662186622866238662486625866268662786628866298663086631866328663386634866358663686637866388663986640866418664286643866448664586646866478664886649866508665186652866538665486655866568665786658866598666086661866628666386664866658666686667866688666986670866718667286673866748667586676866778667886679866808668186682866838668486685866868668786688866898669086691866928669386694866958669686697866988669986700867018670286703867048670586706867078670886709867108671186712867138671486715867168671786718867198672086721867228672386724867258672686727867288672986730867318673286733867348673586736867378673886739867408674186742867438674486745867468674786748867498675086751867528675386754867558675686757867588675986760867618676286763867648676586766867678676886769867708677186772867738677486775867768677786778867798678086781867828678386784867858678686787867888678986790867918679286793867948679586796867978679886799868008680186802868038680486805868068680786808868098681086811868128681386814868158681686817868188681986820868218682286823868248682586826868278682886829868308
683186832868338683486835868368683786838868398684086841868428684386844868458684686847868488684986850868518685286853868548685586856868578685886859868608686186862868638686486865868668686786868868698687086871868728687386874868758687686877868788687986880868818688286883868848688586886868878688886889868908689186892868938689486895868968689786898868998690086901869028690386904869058690686907869088690986910869118691286913869148691586916869178691886919869208692186922869238692486925869268692786928869298693086931869328693386934869358693686937869388693986940869418694286943869448694586946869478694886949869508695186952869538695486955869568695786958869598696086961869628696386964869658696686967869688696986970869718697286973869748697586976869778697886979869808698186982869838698486985869868698786988869898699086991869928699386994869958699686997869988699987000870018700287003870048700587006870078700887009870108701187012870138701487015870168701787018870198702087021870228702387024870258702687027870288702987030870318703287033870348703587036870378703887039870408704187042870438704487045870468704787048870498705087051870528705387054870558705687057870588705987060870618706287063870648706587066870678706887069870708707187072870738707487075870768707787078870798708087081870828708387084870858708687087870888708987090870918709287093870948709587096870978709887099871008710187102871038710487105871068710787108871098711087111871128711387114871158711687117871188711987120871218712287123871248712587126871278712887129871308713187132871338713487135871368713787138871398714087141871428714387144871458714687147871488714987150871518715287153871548715587156871578715887159871608716187162871638716487165871668716787168871698717087171871728717387174871758717687177871788717987180871818718287183871848718587186871878718887189871908719187192871938719487195871968719787198871998720087201872028720387204872058720687207872088720987210872118721287213872148721587216872178721887219872208722187222872238722487225872268722787228872298723087231872328723387234872358723687237872388723987240872418724287243872448724587246872478724887249872508725187252872538725487255872568725787258872598726087261872628726387264872658726687267872688726987270872718727287273872748727587276872778727887279872808728187282872838728487285872868728787288872898729087291872928729387294872958729687297872988729987300873018730287303873048730587306873078730887309873108731187312873138731487315873168731787318873198732087321873228732387324873258732687327873288732987330873318733287333873348733587336873378733887339873408734187342873438734487345873468734787348873498735087351873528735387354873558735687357873588735987360873618736287363873648736587366873678736887369873708737187372873738737487375873768737787378873798738087381873828738387384873858738687387873888738987390873918739287393873948739587396873978739887399874008740187402874038740487405874068740787408874098741087411874128741387414874158741687417874188741987420874218742287423874248742587426874278742887429874308743187432874338743487435874368743787438874398744087441874428744387444874458744687447874488744987450874518745287453874548745587456874578745887459874608746187462874638746487465874668746787468874698747087471874728747387474874758747687477874788747987480874818748287483874848748587486874878748887489874908749187492874938749487495874968749787498874998750087501875028750387504875058750687507875088750987510875118751287513875148751587516875178751887519875208752187522875238752487525875268752787528875298753087531875328753387534875358753687537875388753987540875418
754287543875448754587546875478754887549875508755187552875538755487555875568755787558875598756087561875628756387564875658756687567875688756987570875718757287573875748757587576875778757887579875808758187582875838758487585875868758787588875898759087591875928759387594875958759687597875988759987600876018760287603876048760587606876078760887609876108761187612876138761487615876168761787618876198762087621876228762387624876258762687627876288762987630876318763287633876348763587636876378763887639876408764187642876438764487645876468764787648876498765087651876528765387654876558765687657876588765987660876618766287663876648766587666876678766887669876708767187672876738767487675876768767787678876798768087681876828768387684876858768687687876888768987690876918769287693876948769587696876978769887699877008770187702877038770487705877068770787708877098771087711877128771387714877158771687717877188771987720877218772287723877248772587726877278772887729877308773187732877338773487735877368773787738877398774087741877428774387744877458774687747877488774987750877518775287753877548775587756877578775887759877608776187762877638776487765877668776787768877698777087771877728777387774877758777687777877788777987780877818778287783877848778587786877878778887789877908779187792877938779487795877968779787798877998780087801878028780387804878058780687807878088780987810878118781287813878148781587816878178781887819878208782187822878238782487825878268782787828878298783087831878328783387834878358783687837878388783987840878418784287843878448784587846878478784887849878508785187852878538785487855878568785787858878598786087861878628786387864878658786687867878688786987870878718787287873878748787587876878778787887879878808788187882878838788487885878868788787888878898789087891878928789387894878958789687897878988789987900879018790287903879048790587906879078790887909879108791187912879138791487915879168791787918879198792087921879228792387924879258792687927879288792987930879318793287933879348793587936879378793887939879408794187942879438794487945879468794787948879498795087951879528795387954879558795687957879588795987960879618796287963879648796587966879678796887969879708797187972879738797487975879768797787978879798798087981879828798387984879858798687987879888798987990879918799287993879948799587996879978799887999880008800188002880038800488005880068800788008880098801088011880128801388014880158801688017880188801988020880218802288023880248802588026880278802888029880308803188032880338803488035880368803788038880398804088041880428804388044880458804688047880488804988050880518805288053880548805588056880578805888059880608806188062880638806488065880668806788068880698807088071880728807388074880758807688077880788807988080880818808288083880848808588086880878808888089880908809188092880938809488095880968809788098880998810088101881028810388104881058810688107881088810988110881118811288113881148811588116881178811888119881208812188122881238812488125881268812788128881298813088131881328813388134881358813688137881388813988140881418814288143881448814588146881478814888149881508815188152881538815488155881568815788158881598816088161881628816388164881658816688167881688816988170881718817288173881748817588176881778817888179881808818188182881838818488185881868818788188881898819088191881928819388194881958819688197881988819988200882018820288203882048820588206882078820888209882108821188212882138821488215882168821788218882198822088221882228822388224882258822688227882288822988230882318823288233882348823588236882378823888239882408824188242882438824488245882468824788248882498825088251882528
825388254882558825688257882588825988260882618826288263882648826588266882678826888269882708827188272882738827488275882768827788278882798828088281882828828388284882858828688287882888828988290882918829288293882948829588296882978829888299883008830188302883038830488305883068830788308883098831088311883128831388314883158831688317883188831988320883218832288323883248832588326883278832888329883308833188332883338833488335883368833788338883398834088341883428834388344883458834688347883488834988350883518835288353883548835588356883578835888359883608836188362883638836488365883668836788368883698837088371883728837388374883758837688377883788837988380883818838288383883848838588386883878838888389883908839188392883938839488395883968839788398883998840088401884028840388404884058840688407884088840988410884118841288413884148841588416884178841888419884208842188422884238842488425884268842788428884298843088431884328843388434884358843688437884388843988440884418844288443884448844588446884478844888449884508845188452884538845488455884568845788458884598846088461884628846388464884658846688467884688846988470884718847288473884748847588476884778847888479884808848188482884838848488485884868848788488884898849088491884928849388494884958849688497884988849988500885018850288503885048850588506885078850888509885108851188512885138851488515885168851788518885198852088521885228852388524885258852688527885288852988530885318853288533885348853588536885378853888539885408854188542885438854488545885468854788548885498855088551885528855388554885558855688557885588855988560885618856288563885648856588566885678856888569885708857188572885738857488575885768857788578885798858088581885828858388584885858858688587885888858988590885918859288593885948859588596885978859888599886008860188602886038860488605886068860788608886098861088611886128861388614886158861688617886188861988620886218862288623886248862588626886278862888629886308863188632886338863488635886368863788638886398864088641886428864388644886458864688647886488864988650886518865288653886548865588656886578865888659886608866188662886638866488665886668866788668886698867088671886728867388674886758867688677886788867988680886818868288683886848868588686886878868888689886908869188692886938869488695886968869788698886998870088701887028870388704887058870688707887088870988710887118871288713887148871588716887178871888719887208872188722887238872488725887268872788728887298873088731887328873388734887358873688737887388873988740887418874288743887448874588746887478874888749887508875188752887538875488755887568875788758887598876088761887628876388764887658876688767887688876988770887718877288773887748877588776887778877888779887808878188782887838878488785887868878788788887898879088791887928879388794887958879688797887988879988800888018880288803888048880588806888078880888809888108881188812888138881488815888168881788818888198882088821888228882388824888258882688827888288882988830888318883288833888348883588836888378883888839888408884188842888438884488845888468884788848888498885088851888528885388854888558885688857888588885988860888618886288863888648886588866888678886888869888708887188872888738887488875888768887788878888798888088881888828888388884888858888688887888888888988890888918889288893888948889588896888978889888899889008890188902889038890488905889068890788908889098891088911889128891388914889158891688917889188891988920889218892288923889248892588926889278892888929889308893188932889338893488935889368893788938889398894088941889428894388944889458894688947889488894988950889518895288953889548895588956889578895888959889608896188962889638
896488965889668896788968889698897088971889728897388974889758897688977889788897988980889818898288983889848898588986889878898888989889908899188992889938899488995889968899788998889998900089001890028900389004890058900689007890088900989010890118901289013890148901589016890178901889019890208902189022890238902489025890268902789028890298903089031890328903389034890358903689037890388903989040890418904289043890448904589046890478904889049890508905189052890538905489055890568905789058890598906089061890628906389064890658906689067890688906989070890718907289073890748907589076890778907889079890808908189082890838908489085890868908789088890898909089091890928909389094890958909689097890988909989100891018910289103891048910589106891078910889109891108911189112891138911489115891168911789118891198912089121891228912389124891258912689127891288912989130891318913289133891348913589136891378913889139891408914189142891438914489145891468914789148891498915089151891528915389154891558915689157891588915989160891618916289163891648916589166891678916889169891708917189172891738917489175891768917789178891798918089181891828918389184891858918689187891888918989190891918919289193891948919589196891978919889199892008920189202892038920489205892068920789208892098921089211892128921389214892158921689217892188921989220892218922289223892248922589226892278922889229892308923189232892338923489235892368923789238892398924089241892428924389244892458924689247892488924989250892518925289253892548925589256892578925889259892608926189262892638926489265892668926789268892698927089271892728927389274892758927689277892788927989280892818928289283892848928589286892878928889289892908929189292892938929489295892968929789298892998930089301893028930389304893058930689307893088930989310893118931289313893148931589316893178931889319893208932189322893238932489325893268932789328893298933089331893328933389334893358933689337893388933989340893418934289343893448934589346893478934889349893508935189352893538935489355893568935789358893598936089361893628936389364893658936689367893688936989370893718937289373893748937589376893778937889379893808938189382893838938489385893868938789388893898939089391893928939389394893958939689397893988939989400894018940289403894048940589406894078940889409894108941189412894138941489415894168941789418894198942089421894228942389424894258942689427894288942989430894318943289433894348943589436894378943889439894408944189442894438944489445894468944789448894498945089451894528945389454894558945689457894588945989460894618946289463894648946589466894678946889469894708947189472894738947489475894768947789478894798948089481894828948389484894858948689487894888948989490894918949289493894948949589496894978949889499895008950189502895038950489505895068950789508895098951089511895128951389514895158951689517895188951989520895218952289523895248952589526895278952889529895308953189532895338953489535895368953789538895398954089541895428954389544895458954689547895488954989550895518955289553895548955589556895578955889559895608956189562895638956489565895668956789568895698957089571895728957389574895758957689577895788957989580895818958289583895848958589586895878958889589895908959189592895938959489595895968959789598895998960089601896028960389604896058960689607896088960989610896118961289613896148961589616896178961889619896208962189622896238962489625896268962789628896298963089631896328963389634896358963689637896388963989640896418964289643896448964589646896478964889649896508965189652896538965489655896568965789658896598966089661896628966389664896658966689667896688966989670896718967289673896748
967589676896778967889679896808968189682896838968489685896868968789688896898969089691896928969389694896958969689697896988969989700897018970289703897048970589706897078970889709897108971189712897138971489715897168971789718897198972089721897228972389724897258972689727897288972989730897318973289733897348973589736897378973889739897408974189742897438974489745897468974789748897498975089751897528975389754897558975689757897588975989760897618976289763897648976589766897678976889769897708977189772897738977489775897768977789778897798978089781897828978389784897858978689787897888978989790897918979289793897948979589796897978979889799898008980189802898038980489805898068980789808898098981089811898128981389814898158981689817898188981989820898218982289823898248982589826898278982889829898308983189832898338983489835898368983789838898398984089841898428984389844898458984689847898488984989850898518985289853898548985589856898578985889859898608986189862898638986489865898668986789868898698987089871898728987389874898758987689877898788987989880898818988289883898848988589886898878988889889898908989189892898938989489895898968989789898898998990089901899028990389904899058990689907899088990989910899118991289913899148991589916899178991889919899208992189922899238992489925899268992789928899298993089931899328993389934899358993689937899388993989940899418994289943899448994589946899478994889949899508995189952899538995489955899568995789958899598996089961899628996389964899658996689967899688996989970899718997289973899748997589976899778997889979899808998189982899838998489985899868998789988899898999089991899928999389994899958999689997899988999990000900019000290003900049000590006900079000890009900109001190012900139001490015900169001790018900199002090021900229002390024900259002690027900289002990030900319003290033900349003590036900379003890039900409004190042900439004490045900469004790048900499005090051900529005390054900559005690057900589005990060900619006290063900649006590066900679006890069900709007190072900739007490075900769007790078900799008090081900829008390084900859008690087900889008990090900919009290093900949009590096900979009890099901009010190102901039010490105901069010790108901099011090111901129011390114901159011690117901189011990120901219012290123901249012590126901279012890129901309013190132901339013490135901369013790138901399014090141901429014390144901459014690147901489014990150901519015290153901549015590156901579015890159901609016190162901639016490165901669016790168901699017090171901729017390174901759017690177901789017990180901819018290183901849018590186901879018890189901909019190192901939019490195901969019790198901999020090201902029020390204902059020690207902089020990210902119021290213902149021590216902179021890219902209022190222902239022490225902269022790228902299023090231902329023390234902359023690237902389023990240902419024290243902449024590246902479024890249902509025190252902539025490255902569025790258902599026090261902629026390264902659026690267902689026990270902719027290273902749027590276902779027890279902809028190282902839028490285902869028790288902899029090291902929029390294902959029690297902989029990300903019030290303903049030590306903079030890309903109031190312903139031490315903169031790318903199032090321903229032390324903259032690327903289032990330903319033290333903349033590336903379033890339903409034190342903439034490345903469034790348903499035090351903529035390354903559035690357903589035990360903619036290363903649036590366903679036890369903709037190372903739037490375903769037790378903799038090381903829038390384903859
038690387903889038990390903919039290393903949039590396903979039890399904009040190402904039040490405904069040790408904099041090411904129041390414904159041690417904189041990420904219042290423904249042590426904279042890429904309043190432904339043490435904369043790438904399044090441904429044390444904459044690447904489044990450904519045290453904549045590456904579045890459904609046190462904639046490465904669046790468904699047090471904729047390474904759047690477904789047990480904819048290483904849048590486904879048890489904909049190492904939049490495904969049790498904999050090501905029050390504905059050690507905089050990510905119051290513905149051590516905179051890519905209052190522905239052490525905269052790528905299053090531905329053390534905359053690537905389053990540905419054290543905449054590546905479054890549905509055190552905539055490555905569055790558905599056090561905629056390564905659056690567905689056990570905719057290573905749057590576905779057890579905809058190582905839058490585905869058790588905899059090591905929059390594905959059690597905989059990600906019060290603906049060590606906079060890609906109061190612906139061490615906169061790618906199062090621906229062390624906259062690627906289062990630906319063290633906349063590636906379063890639906409064190642906439064490645906469064790648906499065090651906529065390654906559065690657906589065990660906619066290663906649066590666906679066890669906709067190672906739067490675906769067790678906799068090681906829068390684906859068690687906889068990690906919069290693906949069590696906979069890699907009070190702907039070490705907069070790708907099071090711907129071390714907159071690717907189071990720907219072290723907249072590726907279072890729907309073190732907339073490735907369073790738907399074090741907429074390744907459074690747907489074990750907519075290753907549075590756907579075890759907609076190762907639076490765907669076790768907699077090771907729077390774907759077690777907789077990780907819078290783907849078590786907879078890789907909079190792907939079490795907969079790798907999080090801908029080390804908059080690807908089080990810908119081290813908149081590816908179081890819908209082190822908239082490825908269082790828908299083090831908329083390834908359083690837908389083990840908419084290843908449084590846908479084890849908509085190852908539085490855908569085790858908599086090861908629086390864908659086690867908689086990870908719087290873908749087590876908779087890879908809088190882908839088490885908869088790888908899089090891908929089390894908959089690897908989089990900909019090290903909049090590906909079090890909909109091190912909139091490915909169091790918909199092090921909229092390924909259092690927909289092990930909319093290933909349093590936909379093890939909409094190942909439094490945909469094790948909499095090951909529095390954909559095690957909589095990960909619096290963909649096590966909679096890969909709097190972909739097490975909769097790978909799098090981909829098390984909859098690987909889098990990909919099290993909949099590996909979099890999910009100191002910039100491005910069100791008910099101091011910129101391014910159101691017910189101991020910219102291023910249102591026910279102891029910309103191032910339103491035910369103791038910399104091041910429104391044910459104691047910489104991050910519105291053910549105591056910579105891059910609106191062910639106491065910669106791068910699107091071910729107391074910759107691077910789107991080910819108291083910849108591086910879108891089910909109191092910939109491095910969
109791098910999110091101911029110391104911059110691107911089110991110911119111291113911149111591116911179111891119911209112191122911239112491125911269112791128911299113091131911329113391134911359113691137911389113991140911419114291143911449114591146911479114891149911509115191152911539115491155911569115791158911599116091161911629116391164911659116691167911689116991170911719117291173911749117591176911779117891179911809118191182911839118491185911869118791188911899119091191911929119391194911959119691197911989119991200912019120291203912049120591206912079120891209912109121191212912139121491215912169121791218912199122091221912229122391224912259122691227912289122991230912319123291233912349123591236912379123891239912409124191242912439124491245912469124791248912499125091251912529125391254912559125691257912589125991260912619126291263912649126591266912679126891269912709127191272912739127491275912769127791278912799128091281912829128391284912859128691287912889128991290912919129291293912949129591296912979129891299913009130191302913039130491305913069130791308913099131091311913129131391314913159131691317913189131991320913219132291323913249132591326913279132891329913309133191332913339133491335913369133791338913399134091341913429134391344913459134691347913489134991350913519135291353913549135591356913579135891359913609136191362913639136491365913669136791368913699137091371913729137391374913759137691377913789137991380913819138291383913849138591386913879138891389913909139191392913939139491395913969139791398913999140091401914029140391404914059140691407914089140991410914119141291413914149141591416914179141891419914209142191422914239142491425914269142791428914299143091431914329143391434914359143691437914389143991440914419144291443914449144591446914479144891449914509145191452914539145491455914569145791458914599146091461914629146391464914659146691467914689146991470914719147291473914749147591476914779147891479914809148191482914839148491485914869148791488914899149091491914929149391494914959149691497914989149991500915019150291503915049150591506915079150891509915109151191512915139151491515915169151791518915199152091521915229152391524915259152691527915289152991530915319153291533915349153591536915379153891539915409154191542915439154491545915469154791548915499155091551915529155391554915559155691557915589155991560915619156291563915649156591566915679156891569915709157191572915739157491575915769157791578915799158091581915829158391584915859158691587915889158991590915919159291593915949159591596915979159891599916009160191602916039160491605916069160791608916099161091611916129161391614916159161691617916189161991620916219162291623916249162591626916279162891629916309163191632916339163491635916369163791638916399164091641916429164391644916459164691647916489164991650916519165291653916549165591656916579165891659916609166191662916639166491665916669166791668916699167091671916729167391674916759167691677916789167991680916819168291683916849168591686916879168891689916909169191692916939169491695916969169791698916999170091701917029170391704917059170691707917089170991710917119171291713917149171591716917179171891719917209172191722917239172491725917269172791728917299173091731917329173391734917359173691737917389173991740917419174291743917449174591746917479174891749917509175191752917539175491755917569175791758917599176091761917629176391764917659176691767917689176991770917719177291773917749177591776917779177891779917809178191782917839178491785917869178791788917899179091791917929179391794917959179691797917989179991800918019180291803918049180591806918079
1808918099181091811918129181391814918159181691817918189181991820918219182291823918249182591826918279182891829918309183191832918339183491835918369183791838918399184091841918429184391844918459184691847918489184991850918519185291853918549185591856918579185891859918609186191862918639186491865918669186791868918699187091871918729187391874918759187691877918789187991880918819188291883918849188591886918879188891889918909189191892918939189491895918969189791898918999190091901919029190391904919059190691907919089190991910919119191291913919149191591916919179191891919919209192191922919239192491925919269192791928919299193091931919329193391934919359193691937919389193991940919419194291943919449194591946919479194891949919509195191952919539195491955919569195791958919599196091961919629196391964919659196691967919689196991970919719197291973919749197591976919779197891979919809198191982919839198491985919869198791988919899199091991919929199391994919959199691997919989199992000920019200292003920049200592006920079200892009920109201192012920139201492015920169201792018920199202092021920229202392024920259202692027920289202992030920319203292033920349203592036920379203892039920409204192042920439204492045920469204792048920499205092051920529205392054920559205692057920589205992060920619206292063920649206592066920679206892069920709207192072920739207492075920769207792078920799208092081920829208392084920859208692087920889208992090920919209292093920949209592096920979209892099921009210192102921039210492105921069210792108921099211092111921129211392114921159211692117921189211992120921219212292123921249212592126921279212892129921309213192132921339213492135921369213792138921399214092141921429214392144921459214692147921489214992150921519215292153921549215592156921579215892159921609216192162921639216492165921669216792168921699217092171921729217392174921759217692177921789217992180921819218292183921849218592186921879218892189921909219192192921939219492195921969219792198921999220092201922029220392204922059220692207922089220992210922119221292213922149221592216922179221892219922209222192222922239222492225922269222792228922299223092231922329223392234922359223692237922389223992240922419224292243922449224592246922479224892249922509225192252922539225492255922569225792258922599226092261922629226392264922659226692267922689226992270922719227292273922749227592276922779227892279922809228192282922839228492285922869228792288922899229092291922929229392294922959229692297922989229992300923019230292303923049230592306923079230892309923109231192312923139231492315923169231792318923199232092321923229232392324923259232692327923289232992330923319233292333923349233592336923379233892339923409234192342923439234492345923469234792348923499235092351923529235392354923559235692357923589235992360923619236292363923649236592366923679236892369923709237192372923739237492375923769237792378923799238092381923829238392384923859238692387923889238992390923919239292393923949239592396923979239892399924009240192402
// Copyright 2015-2021 The Khronos Group Inc.
//
// SPDX-License-Identifier: Apache-2.0 OR MIT
//
// This header is generated from the Khronos Vulkan XML API Registry.
#ifndef VULKAN_HPP
#define VULKAN_HPP
#if defined( _MSVC_LANG )
# define VULKAN_HPP_CPLUSPLUS _MSVC_LANG
#else
# define VULKAN_HPP_CPLUSPLUS __cplusplus
#endif
#if 201703L < VULKAN_HPP_CPLUSPLUS
# define VULKAN_HPP_CPP_VERSION 20
#elif 201402L < VULKAN_HPP_CPLUSPLUS
# define VULKAN_HPP_CPP_VERSION 17
#elif 201103L < VULKAN_HPP_CPLUSPLUS
# define VULKAN_HPP_CPP_VERSION 14
#elif 199711L < VULKAN_HPP_CPLUSPLUS
# define VULKAN_HPP_CPP_VERSION 11
#else
# error "vulkan.hpp needs at least c++ standard version 11"
#endif
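// Illustrative note, not part of the generated header: on MSVC, __cplusplus stays at
// 199711L unless /Zc:__cplusplus is passed, while _MSVC_LANG always reflects the
// selected standard, which is why _MSVC_LANG is preferred above. For example, building
// with -std=c++17 (or /std:c++17) makes the chain above define VULKAN_HPP_CPP_VERSION
// as 17, so a hypothetical consumer could gate code like
//
//   #if 17 <= VULKAN_HPP_CPP_VERSION
//   // use std::string_view based overloads
//   #endif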
#include <algorithm>
#include <array>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <functional>
#include <initializer_list>
#include <sstream>
#include <string>
#include <system_error>
#include <tuple>
#include <type_traits>
#include <vulkan/vulkan.h>
#if 17 <= VULKAN_HPP_CPP_VERSION
#include <string_view>
#endif
#if defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
# if !defined(VULKAN_HPP_NO_SMART_HANDLE)
# define VULKAN_HPP_NO_SMART_HANDLE
# endif
#else
# include <memory>
# include <vector>
#endif
#if !defined(VULKAN_HPP_ASSERT)
# include <cassert>
# define VULKAN_HPP_ASSERT assert
#endif
#if !defined(VULKAN_HPP_ASSERT_ON_RESULT)
# define VULKAN_HPP_ASSERT_ON_RESULT VULKAN_HPP_ASSERT
#endif
#if !defined(VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL)
# define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1
#endif
#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1
# if defined( __linux__ ) || defined( __APPLE__ )
# include <dlfcn.h>
# elif defined( _WIN32 )
typedef struct HINSTANCE__ * HINSTANCE;
# if defined( _WIN64 )
typedef int64_t( __stdcall * FARPROC )();
# else
typedef int( __stdcall * FARPROC )();
# endif
extern "C" __declspec( dllimport ) HINSTANCE __stdcall LoadLibraryA( char const * lpLibFileName );
extern "C" __declspec( dllimport ) int __stdcall FreeLibrary( HINSTANCE hLibModule );
extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE hModule, const char * lpProcName );
# endif
#endif
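// Illustrative sketch, not part of the generated header: these declarations back the
// vk::DynamicLoader helper defined further down in this file. A hypothetical application
// that loads the Vulkan library at runtime might use it roughly like
//
//   vk::DynamicLoader dl;
//   auto pfnGetInstanceProcAddr = dl.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
//   // ...then hand pfnGetInstanceProcAddr to a dynamic dispatch loader.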
#if !defined(__has_include)
# define __has_include(x) false
#endif
#if ( 201711 <= __cpp_impl_three_way_comparison ) && __has_include( <compare> ) && !defined( VULKAN_HPP_NO_SPACESHIP_OPERATOR )
# define VULKAN_HPP_HAS_SPACESHIP_OPERATOR
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
# include <compare>
#endif
static_assert( VK_HEADER_VERSION == 169 , "Wrong VK_HEADER_VERSION!" );
// 32-bit vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.
// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
# if !defined( VULKAN_HPP_TYPESAFE_CONVERSION )
# define VULKAN_HPP_TYPESAFE_CONVERSION
# endif
#endif
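// Illustrative sketch, not part of the generated header: with VULKAN_HPP_TYPESAFE_CONVERSION
// defined (the 64-bit default selected above), the handle wrappers convert implicitly to
// their native Vulkan handles; without it the conversion operator is marked explicit.
// Hypothetical usage:
//
//   vk::Instance instance = ...;
//   VkInstance raw1 = instance;                             // only with VULKAN_HPP_TYPESAFE_CONVERSION
//   VkInstance raw2 = static_cast<VkInstance>( instance );  // works in either configuration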
  90. // <tuple> includes <sys/sysmacros.h> through some other header
  91. // this results in major(x) being resolved to gnu_dev_major(x)
  92. // which is an expression in a constructor initializer list.
  93. #if defined(major)
  94. #undef major
  95. #endif
  96. #if defined(minor)
  97. #undef minor
  98. #endif
  99. // Windows defines MemoryBarrier which is deprecated and collides
  100. // with the VULKAN_HPP_NAMESPACE::MemoryBarrier struct.
  101. #if defined(MemoryBarrier)
  102. #undef MemoryBarrier
  103. #endif
  104. #if !defined(VULKAN_HPP_HAS_UNRESTRICTED_UNIONS)
  105. # if defined(__clang__)
  106. # if __has_feature(cxx_unrestricted_unions)
  107. # define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
  108. # endif
  109. # elif defined(__GNUC__)
  110. # define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__)
  111. # if 40600 <= GCC_VERSION
  112. # define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
  113. # endif
  114. # elif defined(_MSC_VER)
  115. # if 1900 <= _MSC_VER
  116. # define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
  117. # endif
  118. # endif
  119. #endif
  120. #if !defined(VULKAN_HPP_INLINE)
  121. # if defined(__clang__)
  122. # if __has_attribute(always_inline)
  123. # define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
  124. # else
  125. # define VULKAN_HPP_INLINE inline
  126. # endif
  127. # elif defined(__GNUC__)
  128. # define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
  129. # elif defined(_MSC_VER)
  130. # define VULKAN_HPP_INLINE inline
  131. # else
  132. # define VULKAN_HPP_INLINE inline
  133. # endif
  134. #endif
  135. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  136. # define VULKAN_HPP_TYPESAFE_EXPLICIT
  137. #else
  138. # define VULKAN_HPP_TYPESAFE_EXPLICIT explicit
  139. #endif
  140. #if defined(__cpp_constexpr)
  141. # define VULKAN_HPP_CONSTEXPR constexpr
  142. # if __cpp_constexpr >= 201304
  143. # define VULKAN_HPP_CONSTEXPR_14 constexpr
  144. # else
  145. # define VULKAN_HPP_CONSTEXPR_14
  146. # endif
  147. # define VULKAN_HPP_CONST_OR_CONSTEXPR constexpr
  148. #else
  149. # define VULKAN_HPP_CONSTEXPR
  150. # define VULKAN_HPP_CONSTEXPR_14
  151. # define VULKAN_HPP_CONST_OR_CONSTEXPR const
  152. #endif
  153. #if !defined(VULKAN_HPP_NOEXCEPT)
  154. # if defined(_MSC_VER) && (_MSC_VER <= 1800)
  155. # define VULKAN_HPP_NOEXCEPT
  156. # else
  157. # define VULKAN_HPP_NOEXCEPT noexcept
  158. # define VULKAN_HPP_HAS_NOEXCEPT 1
  159. # if defined(VULKAN_HPP_NO_EXCEPTIONS)
  160. # define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS noexcept
  161. # else
  162. # define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  163. # endif
  164. # endif
  165. #endif
  166. #if 14 <= VULKAN_HPP_CPP_VERSION
  167. # define VULKAN_HPP_DEPRECATED( msg ) [[deprecated( msg )]]
  168. #else
  169. # define VULKAN_HPP_DEPRECATED( msg )
  170. #endif
  171. #if ( 17 <= VULKAN_HPP_CPP_VERSION ) && !defined( VULKAN_HPP_NO_NODISCARD_WARNINGS )
  172. # define VULKAN_HPP_NODISCARD [[nodiscard]]
  173. # if defined(VULKAN_HPP_NO_EXCEPTIONS)
  174. # define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS [[nodiscard]]
  175. # else
  176. # define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
  177. # endif
  178. #else
  179. # define VULKAN_HPP_NODISCARD
  180. # define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
  181. #endif
  182. #if !defined(VULKAN_HPP_NAMESPACE)
  183. #define VULKAN_HPP_NAMESPACE vk
  184. #endif
  185. #define VULKAN_HPP_STRINGIFY2(text) #text
  186. #define VULKAN_HPP_STRINGIFY(text) VULKAN_HPP_STRINGIFY2(text)
  187. #define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY(VULKAN_HPP_NAMESPACE)
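// Note (illustrative comment, not generated code): VULKAN_HPP_NAMESPACE defaults to `vk`
// but can be overridden by defining the macro before this header is included, e.g. from a
// hypothetical build flag such as:
//   g++ -DVULKAN_HPP_NAMESPACE=my_vk ...   // types then live in my_vk:: instead of vk::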
  188. namespace VULKAN_HPP_NAMESPACE
  189. {
  190. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  191. template <typename T>
  192. class ArrayProxy
  193. {
  194. public:
  195. VULKAN_HPP_CONSTEXPR ArrayProxy() VULKAN_HPP_NOEXCEPT
  196. : m_count( 0 )
  197. , m_ptr( nullptr )
  198. {}
  199. VULKAN_HPP_CONSTEXPR ArrayProxy( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  200. : m_count( 0 )
  201. , m_ptr( nullptr )
  202. {}
  203. ArrayProxy( T & value ) VULKAN_HPP_NOEXCEPT
  204. : m_count( 1 )
  205. , m_ptr( &value )
  206. {}
  207. template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  208. ArrayProxy( typename std::remove_const<T>::type & value ) VULKAN_HPP_NOEXCEPT
  209. : m_count( 1 )
  210. , m_ptr( &value )
  211. {}
  212. ArrayProxy( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT
  213. : m_count( count )
  214. , m_ptr( ptr )
  215. {}
  216. template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  217. ArrayProxy( uint32_t count, typename std::remove_const<T>::type * ptr ) VULKAN_HPP_NOEXCEPT
  218. : m_count( count )
  219. , m_ptr( ptr )
  220. {}
  221. #if __GNUC__ >= 9
  222. #pragma GCC diagnostic push
  223. #pragma GCC diagnostic ignored "-Winit-list-lifetime"
  224. #endif
  225. ArrayProxy( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT
  226. : m_count( static_cast<uint32_t>( list.size() ) )
  227. , m_ptr( list.begin() )
  228. {}
  229. template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  230. ArrayProxy( std::initializer_list<typename std::remove_const<T>::type> const & list ) VULKAN_HPP_NOEXCEPT
  231. : m_count( static_cast<uint32_t>( list.size() ) )
  232. , m_ptr( list.begin() )
  233. {}
  234. ArrayProxy( std::initializer_list<T> & list ) VULKAN_HPP_NOEXCEPT
  235. : m_count( static_cast<uint32_t>( list.size() ) )
  236. , m_ptr( list.begin() )
  237. {}
  238. template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  239. ArrayProxy( std::initializer_list<typename std::remove_const<T>::type> & list ) VULKAN_HPP_NOEXCEPT
  240. : m_count( static_cast<uint32_t>( list.size() ) )
  241. , m_ptr( list.begin() )
  242. {}
  243. #if __GNUC__ >= 9
  244. #pragma GCC diagnostic pop
  245. #endif
  246. template <size_t N>
  247. ArrayProxy( std::array<T, N> const & data ) VULKAN_HPP_NOEXCEPT
  248. : m_count( N )
  249. , m_ptr( data.data() )
  250. {}
  251. template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  252. ArrayProxy( std::array<typename std::remove_const<T>::type, N> const & data ) VULKAN_HPP_NOEXCEPT
  253. : m_count( N )
  254. , m_ptr( data.data() )
  255. {}
  256. template <size_t N>
  257. ArrayProxy( std::array<T, N> & data ) VULKAN_HPP_NOEXCEPT
  258. : m_count( N )
  259. , m_ptr( data.data() )
  260. {}
  261. template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  262. ArrayProxy( std::array<typename std::remove_const<T>::type, N> & data ) VULKAN_HPP_NOEXCEPT
  263. : m_count( N )
  264. , m_ptr( data.data() )
  265. {}
  266. template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
  267. ArrayProxy( std::vector<T, Allocator> const & data ) VULKAN_HPP_NOEXCEPT
  268. : m_count( static_cast<uint32_t>( data.size() ) )
  269. , m_ptr( data.data() )
  270. {}
  271. template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
  272. typename B = T,
  273. typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  274. ArrayProxy( std::vector<typename std::remove_const<T>::type, Allocator> const & data ) VULKAN_HPP_NOEXCEPT
  275. : m_count( static_cast<uint32_t>( data.size() ) )
  276. , m_ptr( data.data() )
  277. {}
  278. template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
  279. ArrayProxy( std::vector<T, Allocator> & data ) VULKAN_HPP_NOEXCEPT
  280. : m_count( static_cast<uint32_t>( data.size() ) )
  281. , m_ptr( data.data() )
  282. {}
  283. template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
  284. typename B = T,
  285. typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  286. ArrayProxy( std::vector<typename std::remove_const<T>::type, Allocator> & data ) VULKAN_HPP_NOEXCEPT
  287. : m_count( static_cast<uint32_t>( data.size() ) )
  288. , m_ptr( data.data() )
  289. {}
  290. const T * begin() const VULKAN_HPP_NOEXCEPT
  291. {
  292. return m_ptr;
  293. }
  294. const T * end() const VULKAN_HPP_NOEXCEPT
  295. {
  296. return m_ptr + m_count;
  297. }
  298. const T & front() const VULKAN_HPP_NOEXCEPT
  299. {
  300. VULKAN_HPP_ASSERT( m_count && m_ptr );
  301. return *m_ptr;
  302. }
  303. const T & back() const VULKAN_HPP_NOEXCEPT
  304. {
  305. VULKAN_HPP_ASSERT( m_count && m_ptr );
  306. return *( m_ptr + m_count - 1 );
  307. }
  308. bool empty() const VULKAN_HPP_NOEXCEPT
  309. {
  310. return ( m_count == 0 );
  311. }
  312. uint32_t size() const VULKAN_HPP_NOEXCEPT
  313. {
  314. return m_count;
  315. }
  316. T * data() const VULKAN_HPP_NOEXCEPT
  317. {
  318. return m_ptr;
  319. }
  320. private:
  321. uint32_t m_count;
  322. T * m_ptr;
  323. };
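// Usage sketch (illustrative comment, not generated code): ArrayProxy is the lightweight
// count/pointer view taken by enhanced-mode functions, so a caller can pass a single
// element, an initializer list, a std::array or a std::vector without copying. The names
// `queue`, `submitInfo`, `submits` and `fence` below are hypothetical:
//   vk::SubmitInfo submitInfo = ...;
//   queue.submit( submitInfo, fence );                   // single element
//   queue.submit( { submitInfo, submitInfo }, fence );   // initializer list
//   std::vector<vk::SubmitInfo> submits = ...;
//   queue.submit( submits, fence );                      // std::vector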
  324. template <typename T>
  325. class ArrayProxyNoTemporaries
  326. {
  327. public:
  328. VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries() VULKAN_HPP_NOEXCEPT
  329. : m_count( 0 )
  330. , m_ptr( nullptr )
  331. {}
  332. VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  333. : m_count( 0 )
  334. , m_ptr( nullptr )
  335. {}
  336. ArrayProxyNoTemporaries( T & value ) VULKAN_HPP_NOEXCEPT
  337. : m_count( 1 )
  338. , m_ptr( &value )
  339. {}
  340. ArrayProxyNoTemporaries( T && value ) = delete;
  341. template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  342. ArrayProxyNoTemporaries( typename std::remove_const<T>::type & value ) VULKAN_HPP_NOEXCEPT
  343. : m_count( 1 )
  344. , m_ptr( &value )
  345. {}
  346. template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  347. ArrayProxyNoTemporaries( typename std::remove_const<T>::type && value ) = delete;
  348. ArrayProxyNoTemporaries( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT
  349. : m_count( count )
  350. , m_ptr( ptr )
  351. {}
  352. template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  353. ArrayProxyNoTemporaries( uint32_t count, typename std::remove_const<T>::type * ptr ) VULKAN_HPP_NOEXCEPT
  354. : m_count( count )
  355. , m_ptr( ptr )
  356. {}
  357. ArrayProxyNoTemporaries( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT
  358. : m_count( static_cast<uint32_t>( list.size() ) )
  359. , m_ptr( list.begin() )
  360. {}
  361. ArrayProxyNoTemporaries( std::initializer_list<T> const && list ) = delete;
  362. template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  363. ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> const & list )
  364. VULKAN_HPP_NOEXCEPT
  365. : m_count( static_cast<uint32_t>( list.size() ) )
  366. , m_ptr( list.begin() )
  367. {}
  368. template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  369. ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> const && list ) = delete;
  370. ArrayProxyNoTemporaries( std::initializer_list<T> & list ) VULKAN_HPP_NOEXCEPT
  371. : m_count( static_cast<uint32_t>( list.size() ) )
  372. , m_ptr( list.begin() )
  373. {}
  374. ArrayProxyNoTemporaries( std::initializer_list<T> && list ) = delete;
  375. template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  376. ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> & list ) VULKAN_HPP_NOEXCEPT
  377. : m_count( static_cast<uint32_t>( list.size() ) )
  378. , m_ptr( list.begin() )
  379. {}
  380. template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  381. ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> && list ) = delete;
  382. template <size_t N>
  383. ArrayProxyNoTemporaries( std::array<T, N> const & data ) VULKAN_HPP_NOEXCEPT
  384. : m_count( N )
  385. , m_ptr( data.data() )
  386. {}
  387. template <size_t N>
  388. ArrayProxyNoTemporaries( std::array<T, N> const && data ) = delete;
  389. template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  390. ArrayProxyNoTemporaries( std::array<typename std::remove_const<T>::type, N> const & data ) VULKAN_HPP_NOEXCEPT
  391. : m_count( N )
  392. , m_ptr( data.data() )
  393. {}
  394. template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  395. ArrayProxyNoTemporaries( std::array<typename std::remove_const<T>::type, N> const && data ) = delete;
  396. template <size_t N>
  397. ArrayProxyNoTemporaries( std::array<T, N> & data ) VULKAN_HPP_NOEXCEPT
  398. : m_count( N )
  399. , m_ptr( data.data() )
  400. {}
  401. template <size_t N>
  402. ArrayProxyNoTemporaries( std::array<T, N> && data ) = delete;
  403. template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  404. ArrayProxyNoTemporaries( std::array<typename std::remove_const<T>::type, N> & data ) VULKAN_HPP_NOEXCEPT
  405. : m_count( N )
  406. , m_ptr( data.data() )
  407. {}
  408. template <size_t N, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  409. ArrayProxyNoTemporaries( std::array<typename std::remove_const<T>::type, N> && data ) = delete;
  410. template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
  411. ArrayProxyNoTemporaries( std::vector<T, Allocator> const & data ) VULKAN_HPP_NOEXCEPT
  412. : m_count( static_cast<uint32_t>( data.size() ) )
  413. , m_ptr( data.data() )
  414. {}
  415. template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
  416. ArrayProxyNoTemporaries( std::vector<T, Allocator> const && data ) = delete;
  417. template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
  418. typename B = T,
  419. typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  420. ArrayProxyNoTemporaries( std::vector<typename std::remove_const<T>::type, Allocator> const & data )
  421. VULKAN_HPP_NOEXCEPT
  422. : m_count( static_cast<uint32_t>( data.size() ) )
  423. , m_ptr( data.data() )
  424. {}
  425. template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
  426. typename B = T,
  427. typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  428. ArrayProxyNoTemporaries( std::vector<typename std::remove_const<T>::type, Allocator> const && data ) = delete;
  429. template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
  430. ArrayProxyNoTemporaries( std::vector<T, Allocator> & data ) VULKAN_HPP_NOEXCEPT
  431. : m_count( static_cast<uint32_t>( data.size() ) )
  432. , m_ptr( data.data() )
  433. {}
  434. template <class Allocator = std::allocator<typename std::remove_const<T>::type>>
  435. ArrayProxyNoTemporaries( std::vector<T, Allocator> && data ) = delete;
  436. template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
  437. typename B = T,
  438. typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  439. ArrayProxyNoTemporaries( std::vector<typename std::remove_const<T>::type, Allocator> & data ) VULKAN_HPP_NOEXCEPT
  440. : m_count( static_cast<uint32_t>( data.size() ) )
  441. , m_ptr( data.data() )
  442. {}
  443. template <class Allocator = std::allocator<typename std::remove_const<T>::type>,
  444. typename B = T,
  445. typename std::enable_if<std::is_const<B>::value, int>::type = 0>
  446. ArrayProxyNoTemporaries( std::vector<typename std::remove_const<T>::type, Allocator> && data ) = delete;
  447. const T * begin() const VULKAN_HPP_NOEXCEPT
  448. {
  449. return m_ptr;
  450. }
  451. const T * end() const VULKAN_HPP_NOEXCEPT
  452. {
  453. return m_ptr + m_count;
  454. }
  455. const T & front() const VULKAN_HPP_NOEXCEPT
  456. {
  457. VULKAN_HPP_ASSERT( m_count && m_ptr );
  458. return *m_ptr;
  459. }
  460. const T & back() const VULKAN_HPP_NOEXCEPT
  461. {
  462. VULKAN_HPP_ASSERT( m_count && m_ptr );
  463. return *( m_ptr + m_count - 1 );
  464. }
  465. bool empty() const VULKAN_HPP_NOEXCEPT
  466. {
  467. return ( m_count == 0 );
  468. }
  469. uint32_t size() const VULKAN_HPP_NOEXCEPT
  470. {
  471. return m_count;
  472. }
  473. T * data() const VULKAN_HPP_NOEXCEPT
  474. {
  475. return m_ptr;
  476. }
  477. private:
  478. uint32_t m_count;
  479. T * m_ptr;
  480. };
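// Note (illustrative comment): ArrayProxyNoTemporaries mirrors ArrayProxy but deletes every
// rvalue overload, so it cannot be bound to a temporary whose storage would die before the
// stored pointer is used. With a hypothetical function `foo` taking ArrayProxyNoTemporaries:
//   std::vector<vk::Rect2D> rects = ...;
//   foo( rects );                          // fine: lvalue outlives the call
//   foo( std::vector<vk::Rect2D>{} );      // does not compile: deleted rvalue overload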
  481. #endif
  482. template <typename T, size_t N>
  483. class ArrayWrapper1D : public std::array<T,N>
  484. {
  485. public:
  486. VULKAN_HPP_CONSTEXPR ArrayWrapper1D() VULKAN_HPP_NOEXCEPT
  487. : std::array<T, N>()
  488. {}
  489. VULKAN_HPP_CONSTEXPR ArrayWrapper1D(std::array<T,N> const& data) VULKAN_HPP_NOEXCEPT
  490. : std::array<T, N>(data)
  491. {}
  492. #if defined(_WIN32) && !defined(_WIN64)
  493. VULKAN_HPP_CONSTEXPR T const& operator[](int index) const VULKAN_HPP_NOEXCEPT
  494. {
  495. return std::array<T, N>::operator[](index);
  496. }
  497. T & operator[](int index) VULKAN_HPP_NOEXCEPT
  498. {
  499. return std::array<T, N>::operator[](index);
  500. }
  501. #endif
  502. operator T const* () const VULKAN_HPP_NOEXCEPT
  503. {
  504. return this->data();
  505. }
  506. operator T * () VULKAN_HPP_NOEXCEPT
  507. {
  508. return this->data();
  509. }
  510. template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
  511. operator std::string() const
  512. {
  513. return std::string( this->data() );
  514. }
  515. #if 17 <= VULKAN_HPP_CPP_VERSION
  516. template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
  517. operator std::string_view() const
  518. {
  519. return std::string_view( this->data() );
  520. }
  521. #endif
  522. template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
  523. bool operator<( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
  524. {
  525. return *static_cast<std::array<char, N> const *>( this ) < *static_cast<std::array<char, N> const *>( &rhs );
  526. }
  527. template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
  528. bool operator<=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
  529. {
  530. return *static_cast<std::array<char, N> const *>( this ) <= *static_cast<std::array<char, N> const *>( &rhs );
  531. }
  532. template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
  533. bool operator>( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
  534. {
  535. return *static_cast<std::array<char, N> const *>( this ) > *static_cast<std::array<char, N> const *>( &rhs );
  536. }
  537. template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
  538. bool operator>=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
  539. {
  540. return *static_cast<std::array<char, N> const *>( this ) >= *static_cast<std::array<char, N> const *>( &rhs );
  541. }
  542. template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
  543. bool operator==( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
  544. {
  545. return *static_cast<std::array<char, N> const *>( this ) == *static_cast<std::array<char, N> const *>( &rhs );
  546. }
  547. template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
  548. bool operator!=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
  549. {
  550. return *static_cast<std::array<char, N> const *>( this ) != *static_cast<std::array<char, N> const *>( &rhs );
  551. }
  552. };
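// Usage sketch (illustrative comment): ArrayWrapper1D wraps the fixed-size C arrays in the
// generated structs and converts to T* as well as to std::string / std::string_view for
// char arrays. The name `physicalDevice` below is hypothetical:
//   vk::PhysicalDeviceProperties properties = physicalDevice.getProperties();
//   std::string deviceName = properties.deviceName;   // char array -> std::string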
// relational operators between std::string and arrays of chars
  554. template <size_t N>
  555. bool operator<(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
  556. {
  557. return lhs < rhs.data();
  558. }
  559. template <size_t N>
  560. bool operator<=(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
  561. {
  562. return lhs <= rhs.data();
  563. }
  564. template <size_t N>
  565. bool operator>(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
  566. {
  567. return lhs > rhs.data();
  568. }
  569. template <size_t N>
  570. bool operator>=(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
  571. {
  572. return lhs >= rhs.data();
  573. }
  574. template <size_t N>
  575. bool operator==(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
  576. {
  577. return lhs == rhs.data();
  578. }
  579. template <size_t N>
  580. bool operator!=(std::string const& lhs, ArrayWrapper1D<char, N> const& rhs) VULKAN_HPP_NOEXCEPT
  581. {
  582. return lhs != rhs.data();
  583. }
  584. template <typename T, size_t N, size_t M>
  585. class ArrayWrapper2D : public std::array<ArrayWrapper1D<T,M>,N>
  586. {
  587. public:
  588. VULKAN_HPP_CONSTEXPR ArrayWrapper2D() VULKAN_HPP_NOEXCEPT
  589. : std::array<ArrayWrapper1D<T,M>, N>()
  590. {}
  591. VULKAN_HPP_CONSTEXPR ArrayWrapper2D(std::array<std::array<T,M>,N> const& data) VULKAN_HPP_NOEXCEPT
  592. : std::array<ArrayWrapper1D<T,M>, N>(*reinterpret_cast<std::array<ArrayWrapper1D<T,M>,N> const*>(&data))
  593. {}
  594. };
  595. template <typename FlagBitsType> struct FlagTraits
  596. {
  597. enum { allFlags = 0 };
  598. };
  599. template <typename BitType>
  600. class Flags
  601. {
  602. public:
  603. using MaskType = typename std::underlying_type<BitType>::type;
  604. // constructors
  605. VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT
  606. : m_mask(0)
  607. {}
  608. VULKAN_HPP_CONSTEXPR Flags(BitType bit) VULKAN_HPP_NOEXCEPT
  609. : m_mask(static_cast<MaskType>(bit))
  610. {}
  611. VULKAN_HPP_CONSTEXPR Flags(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT = default;
  612. VULKAN_HPP_CONSTEXPR explicit Flags(MaskType flags) VULKAN_HPP_NOEXCEPT
  613. : m_mask(flags)
  614. {}
  615. // relational operators
  616. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  617. auto operator<=>(Flags<BitType> const&) const = default;
  618. #else
  619. VULKAN_HPP_CONSTEXPR bool operator<(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
  620. {
  621. return m_mask < rhs.m_mask;
  622. }
  623. VULKAN_HPP_CONSTEXPR bool operator<=(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
  624. {
  625. return m_mask <= rhs.m_mask;
  626. }
  627. VULKAN_HPP_CONSTEXPR bool operator>(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
  628. {
  629. return m_mask > rhs.m_mask;
  630. }
  631. VULKAN_HPP_CONSTEXPR bool operator>=(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
  632. {
  633. return m_mask >= rhs.m_mask;
  634. }
  635. VULKAN_HPP_CONSTEXPR bool operator==(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
  636. {
  637. return m_mask == rhs.m_mask;
  638. }
  639. VULKAN_HPP_CONSTEXPR bool operator!=(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
  640. {
  641. return m_mask != rhs.m_mask;
  642. }
  643. #endif
  644. // logical operator
  645. VULKAN_HPP_CONSTEXPR bool operator!() const VULKAN_HPP_NOEXCEPT
  646. {
  647. return !m_mask;
  648. }
  649. // bitwise operators
  650. VULKAN_HPP_CONSTEXPR Flags<BitType> operator&(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
  651. {
  652. return Flags<BitType>(m_mask & rhs.m_mask);
  653. }
  654. VULKAN_HPP_CONSTEXPR Flags<BitType> operator|(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
  655. {
  656. return Flags<BitType>(m_mask | rhs.m_mask);
  657. }
  658. VULKAN_HPP_CONSTEXPR Flags<BitType> operator^(Flags<BitType> const& rhs) const VULKAN_HPP_NOEXCEPT
  659. {
  660. return Flags<BitType>(m_mask ^ rhs.m_mask);
  661. }
  662. VULKAN_HPP_CONSTEXPR Flags<BitType> operator~() const VULKAN_HPP_NOEXCEPT
  663. {
  664. return Flags<BitType>(m_mask ^ FlagTraits<BitType>::allFlags);
  665. }
  666. // assignment operators
  667. VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT = default;
  668. VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator|=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
  669. {
  670. m_mask |= rhs.m_mask;
  671. return *this;
  672. }
  673. VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator&=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
  674. {
  675. m_mask &= rhs.m_mask;
  676. return *this;
  677. }
  678. VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator^=(Flags<BitType> const& rhs) VULKAN_HPP_NOEXCEPT
  679. {
  680. m_mask ^= rhs.m_mask;
  681. return *this;
  682. }
  683. // cast operators
  684. explicit VULKAN_HPP_CONSTEXPR operator bool() const VULKAN_HPP_NOEXCEPT
  685. {
  686. return !!m_mask;
  687. }
  688. explicit VULKAN_HPP_CONSTEXPR operator MaskType() const VULKAN_HPP_NOEXCEPT
  689. {
  690. return m_mask;
  691. }
  692. #if defined(VULKAN_HPP_FLAGS_MASK_TYPE_AS_PUBLIC)
  693. public:
  694. #else
  695. private:
  696. #endif
  697. MaskType m_mask;
  698. };
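// Usage sketch (illustrative comment): Flags<BitType> is the type-safe bitmask behind every
// Vk*Flags alias; enum bits combine into Flags values, and a Flags value converts explicitly
// to bool (any bit set) or to its underlying mask type:
//   vk::ImageUsageFlags usage = vk::ImageUsageFlagBits::eColorAttachment
//                             | vk::ImageUsageFlagBits::eTransferSrc;
//   if ( usage & vk::ImageUsageFlagBits::eTransferSrc ) { /* bit is set */ }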
  699. #if !defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  700. // relational operators only needed for pre C++20
  701. template <typename BitType>
  702. VULKAN_HPP_CONSTEXPR bool operator<(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
  703. {
  704. return flags.operator>( bit );
  705. }
  706. template <typename BitType>
  707. VULKAN_HPP_CONSTEXPR bool operator<=(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
  708. {
  709. return flags.operator>=( bit );
  710. }
  711. template <typename BitType>
  712. VULKAN_HPP_CONSTEXPR bool operator>(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
  713. {
  714. return flags.operator<( bit );
  715. }
  716. template <typename BitType>
  717. VULKAN_HPP_CONSTEXPR bool operator>=(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
  718. {
  719. return flags.operator<=(bit);
  720. }
  721. template <typename BitType>
  722. VULKAN_HPP_CONSTEXPR bool operator==(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
  723. {
  724. return flags.operator==( bit );
  725. }
  726. template <typename BitType>
  727. VULKAN_HPP_CONSTEXPR bool operator!=(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
  728. {
  729. return flags.operator!=( bit );
  730. }
  731. #endif
  732. // bitwise operators
  733. template <typename BitType>
  734. VULKAN_HPP_CONSTEXPR Flags<BitType> operator&(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
  735. {
  736. return flags.operator&( bit );
  737. }
  738. template <typename BitType>
  739. VULKAN_HPP_CONSTEXPR Flags<BitType> operator|(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
  740. {
  741. return flags.operator|( bit );
  742. }
  743. template <typename BitType>
  744. VULKAN_HPP_CONSTEXPR Flags<BitType> operator^(BitType bit, Flags<BitType> const& flags) VULKAN_HPP_NOEXCEPT
  745. {
  746. return flags.operator^( bit );
  747. }
  748. template <typename RefType>
  749. class Optional
  750. {
  751. public:
  752. Optional(RefType & reference) VULKAN_HPP_NOEXCEPT { m_ptr = &reference; }
  753. Optional(RefType * ptr) VULKAN_HPP_NOEXCEPT { m_ptr = ptr; }
  754. Optional(std::nullptr_t) VULKAN_HPP_NOEXCEPT { m_ptr = nullptr; }
  755. operator RefType*() const VULKAN_HPP_NOEXCEPT { return m_ptr; }
  756. RefType const* operator->() const VULKAN_HPP_NOEXCEPT { return m_ptr; }
  757. explicit operator bool() const VULKAN_HPP_NOEXCEPT { return !!m_ptr; }
  758. private:
  759. RefType *m_ptr;
  760. };
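// Note (illustrative comment): Optional<RefType> lets enhanced-mode parameters accept either
// a reference or nullptr, e.g. the allocation callbacks of the create functions. The names
// `device`, `createInfo` and `myAllocationCallbacks` below are hypothetical:
//   vk::Fence fence  = device.createFence( createInfo );                          // nullptr allocator by default
//   vk::Fence fence2 = device.createFence( createInfo, myAllocationCallbacks );   // explicit allocator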
  761. template <typename X, typename Y> struct StructExtends { enum { value = false }; };
  762. template<typename Type, class...>
  763. struct IsPartOfStructureChain
  764. {
  765. static const bool valid = false;
  766. };
  767. template<typename Type, typename Head, typename... Tail>
  768. struct IsPartOfStructureChain<Type, Head, Tail...>
  769. {
  770. static const bool valid = std::is_same<Type, Head>::value || IsPartOfStructureChain<Type, Tail...>::valid;
  771. };
  772. template <size_t Index, typename T, typename... ChainElements>
  773. struct StructureChainContains
  774. {
  775. static const bool value = std::is_same<T, typename std::tuple_element<Index, std::tuple<ChainElements...>>::type>::value ||
  776. StructureChainContains<Index - 1, T, ChainElements...>::value;
  777. };
  778. template <typename T, typename... ChainElements>
  779. struct StructureChainContains<0, T, ChainElements...>
  780. {
  781. static const bool value = std::is_same<T, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value;
  782. };
  783. template <size_t Index, typename... ChainElements>
  784. struct StructureChainValidation
  785. {
  786. using TestType = typename std::tuple_element<Index, std::tuple<ChainElements...>>::type;
  787. static const bool valid =
  788. StructExtends<TestType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value &&
  789. ( TestType::allowDuplicate || !StructureChainContains<Index - 1, TestType, ChainElements...>::value ) &&
  790. StructureChainValidation<Index - 1, ChainElements...>::valid;
  791. };
  792. template <typename... ChainElements>
  793. struct StructureChainValidation<0, ChainElements...>
  794. {
  795. static const bool valid = true;
  796. };
  797. template <typename... ChainElements>
  798. class StructureChain : public std::tuple<ChainElements...>
  799. {
  800. public:
  801. StructureChain() VULKAN_HPP_NOEXCEPT
  802. {
  803. static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
  804. "The structure chain is not valid!" );
  805. link<sizeof...( ChainElements ) - 1>();
  806. }
  807. StructureChain( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( rhs )
  808. {
  809. static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
  810. "The structure chain is not valid!" );
  811. link<sizeof...( ChainElements ) - 1>();
  812. }
  813. StructureChain( StructureChain && rhs ) VULKAN_HPP_NOEXCEPT
  814. : std::tuple<ChainElements...>( std::forward<std::tuple<ChainElements...>>( rhs ) )
  815. {
  816. static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
  817. "The structure chain is not valid!" );
  818. link<sizeof...( ChainElements ) - 1>();
  819. }
  820. StructureChain( ChainElements const &... elems ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( elems... )
  821. {
  822. static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
  823. "The structure chain is not valid!" );
  824. link<sizeof...( ChainElements ) - 1>();
  825. }
  826. StructureChain & operator=( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT
  827. {
  828. std::tuple<ChainElements...>::operator=( rhs );
  829. link<sizeof...( ChainElements ) - 1>();
  830. return *this;
  831. }
  832. StructureChain & operator=( StructureChain && rhs ) = delete;
  833. template <typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0>
  834. T & get() VULKAN_HPP_NOEXCEPT
  835. {
  836. return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>( static_cast<std::tuple<ChainElements...>&>( *this ) );
  837. }
  838. template <typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0>
  839. T const & get() const VULKAN_HPP_NOEXCEPT
  840. {
  841. return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>( static_cast<std::tuple<ChainElements...> const &>( *this ) );
  842. }
  843. template <typename T0, typename T1, typename... Ts>
  844. std::tuple<T0 &, T1 &, Ts &...> get() VULKAN_HPP_NOEXCEPT
  845. {
  846. return std::tie( get<T0>(), get<T1>(), get<Ts>()... );
  847. }
  848. template <typename T0, typename T1, typename... Ts>
  849. std::tuple<T0 const &, T1 const &, Ts const &...> get() const VULKAN_HPP_NOEXCEPT
  850. {
  851. return std::tie( get<T0>(), get<T1>(), get<Ts>()... );
  852. }
  853. template <typename ClassType, size_t Which = 0>
  854. typename std::enable_if<
  855. std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value &&
  856. ( Which == 0 ),
  857. bool>::type
  858. isLinked() const VULKAN_HPP_NOEXCEPT
  859. {
  860. return true;
  861. }
  862. template <typename ClassType, size_t Which = 0>
  863. typename std::enable_if<
  864. !std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value ||
  865. ( Which != 0 ),
  866. bool>::type
  867. isLinked() const VULKAN_HPP_NOEXCEPT
  868. {
  869. static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid,
  870. "Can't unlink Structure that's not part of this StructureChain!" );
  871. return isLinked( reinterpret_cast<VkBaseInStructure const *>( &get<ClassType, Which>() ) );
  872. }
  873. template <typename ClassType, size_t Which = 0>
  874. typename std::enable_if<
  875. !std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value ||
  876. ( Which != 0 ),
  877. void>::type relink() VULKAN_HPP_NOEXCEPT
  878. {
  879. static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid,
  880. "Can't relink Structure that's not part of this StructureChain!" );
  881. auto pNext = reinterpret_cast<VkBaseInStructure *>( &get<ClassType, Which>() );
  882. VULKAN_HPP_ASSERT( !isLinked( pNext ) );
  883. auto & headElement = std::get<0>( static_cast<std::tuple<ChainElements...> &>( *this ) );
  884. pNext->pNext = reinterpret_cast<VkBaseInStructure const *>( headElement.pNext );
  885. headElement.pNext = pNext;
  886. }
  887. template <typename ClassType, size_t Which = 0>
  888. typename std::enable_if<
  889. !std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value ||
  890. ( Which != 0 ),
  891. void>::type unlink() VULKAN_HPP_NOEXCEPT
  892. {
  893. static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid,
  894. "Can't unlink Structure that's not part of this StructureChain!" );
  895. unlink( reinterpret_cast<VkBaseOutStructure const *>( &get<ClassType, Which>() ) );
  896. }
  897. private:
  898. template <int Index, typename T, int Which, typename, class First, class... Types>
  899. struct ChainElementIndex : ChainElementIndex<Index + 1, T, Which, void, Types...>
  900. {};
  901. template <int Index, typename T, int Which, class First, class... Types>
  902. struct ChainElementIndex<Index,
  903. T,
  904. Which,
  905. typename std::enable_if<!std::is_same<T, First>::value, void>::type,
  906. First,
  907. Types...> : ChainElementIndex<Index + 1, T, Which, void, Types...>
  908. {};
  909. template <int Index, typename T, int Which, class First, class... Types>
  910. struct ChainElementIndex<Index,
  911. T,
  912. Which,
  913. typename std::enable_if<std::is_same<T, First>::value, void>::type,
  914. First,
  915. Types...> : ChainElementIndex<Index + 1, T, Which - 1, void, Types...>
  916. {};
  917. template <int Index, typename T, class First, class... Types>
  918. struct ChainElementIndex<Index,
  919. T,
  920. 0,
  921. typename std::enable_if<std::is_same<T, First>::value, void>::type,
  922. First,
  923. Types...> : std::integral_constant<int, Index>
  924. {};
  925. bool isLinked( VkBaseInStructure const * pNext ) const VULKAN_HPP_NOEXCEPT
  926. {
  927. VkBaseInStructure const * elementPtr = reinterpret_cast<VkBaseInStructure const *>(
  928. &std::get<0>( static_cast<std::tuple<ChainElements...> const &>( *this ) ) );
  929. while ( elementPtr )
  930. {
  931. if ( elementPtr->pNext == pNext )
  932. {
  933. return true;
  934. }
  935. elementPtr = elementPtr->pNext;
  936. }
  937. return false;
  938. }
  939. template <size_t Index>
  940. typename std::enable_if<Index != 0, void>::type link() VULKAN_HPP_NOEXCEPT
  941. {
  942. auto & x = std::get<Index - 1>( static_cast<std::tuple<ChainElements...>&>( *this ) );
  943. x.pNext = &std::get<Index>( static_cast<std::tuple<ChainElements...>&>( *this ) );
  944. link<Index - 1>();
  945. }
  946. template <size_t Index>
  947. typename std::enable_if<Index == 0, void>::type link() VULKAN_HPP_NOEXCEPT
  948. {}
  949. void unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT
  950. {
  951. VkBaseOutStructure * elementPtr = reinterpret_cast<VkBaseOutStructure *>(
  952. &std::get<0>( static_cast<std::tuple<ChainElements...> &>( *this ) ) );
  953. while ( elementPtr && ( elementPtr->pNext != pNext ) )
  954. {
  955. elementPtr = elementPtr->pNext;
  956. }
  957. if ( elementPtr )
  958. {
  959. elementPtr->pNext = pNext->pNext;
  960. }
  961. else
  962. {
VULKAN_HPP_ASSERT( false );  // fires if the ClassType member has already been unlinked
  964. }
  965. }
  966. };
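// Usage sketch (illustrative comment): StructureChain links the pNext members of its elements
// on construction, get<T>() returns the element of type T, and the compile-time validation
// above rejects chains whose extension structs do not extend the head structure. The name
// `physicalDevice` below is hypothetical:
//   vk::StructureChain<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features> chain;
//   physicalDevice.getFeatures2( &chain.get<vk::PhysicalDeviceFeatures2>() );
//   bool timelineSemaphore = chain.get<vk::PhysicalDeviceVulkan12Features>().timelineSemaphore;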
  967. #if !defined(VULKAN_HPP_NO_SMART_HANDLE)
  968. template <typename Type, typename Dispatch> class UniqueHandleTraits;
  969. template <typename Type, typename Dispatch>
  970. class UniqueHandle : public UniqueHandleTraits<Type,Dispatch>::deleter
  971. {
  972. private:
  973. using Deleter = typename UniqueHandleTraits<Type,Dispatch>::deleter;
  974. public:
  975. using element_type = Type;
  976. UniqueHandle()
  977. : Deleter()
  978. , m_value()
  979. {}
  980. explicit UniqueHandle( Type const& value, Deleter const& deleter = Deleter() ) VULKAN_HPP_NOEXCEPT
  981. : Deleter( deleter)
  982. , m_value( value )
  983. {}
  984. UniqueHandle( UniqueHandle const& ) = delete;
  985. UniqueHandle( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT
  986. : Deleter( std::move( static_cast<Deleter&>( other ) ) )
  987. , m_value( other.release() )
  988. {}
  989. ~UniqueHandle() VULKAN_HPP_NOEXCEPT
  990. {
  991. if ( m_value ) this->destroy( m_value );
  992. }
  993. UniqueHandle & operator=( UniqueHandle const& ) = delete;
  994. UniqueHandle & operator=( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT
  995. {
  996. reset( other.release() );
  997. *static_cast<Deleter*>(this) = std::move( static_cast<Deleter&>(other) );
  998. return *this;
  999. }
  1000. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  1001. {
  1002. return m_value.operator bool();
  1003. }
  1004. Type const* operator->() const VULKAN_HPP_NOEXCEPT
  1005. {
  1006. return &m_value;
  1007. }
  1008. Type * operator->() VULKAN_HPP_NOEXCEPT
  1009. {
  1010. return &m_value;
  1011. }
  1012. Type const& operator*() const VULKAN_HPP_NOEXCEPT
  1013. {
  1014. return m_value;
  1015. }
  1016. Type & operator*() VULKAN_HPP_NOEXCEPT
  1017. {
  1018. return m_value;
  1019. }
  1020. const Type & get() const VULKAN_HPP_NOEXCEPT
  1021. {
  1022. return m_value;
  1023. }
  1024. Type & get() VULKAN_HPP_NOEXCEPT
  1025. {
  1026. return m_value;
  1027. }
  1028. void reset( Type const& value = Type() ) VULKAN_HPP_NOEXCEPT
  1029. {
  1030. if ( m_value != value )
  1031. {
  1032. if ( m_value ) this->destroy( m_value );
  1033. m_value = value;
  1034. }
  1035. }
  1036. Type release() VULKAN_HPP_NOEXCEPT
  1037. {
  1038. Type value = m_value;
  1039. m_value = nullptr;
  1040. return value;
  1041. }
  1042. void swap( UniqueHandle<Type,Dispatch> & rhs ) VULKAN_HPP_NOEXCEPT
  1043. {
  1044. std::swap(m_value, rhs.m_value);
  1045. std::swap(static_cast<Deleter&>(*this), static_cast<Deleter&>(rhs));
  1046. }
  1047. private:
  1048. Type m_value;
  1049. };
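// Usage sketch (illustrative comment): UniqueHandle is the RAII wrapper returned by the
// ...Unique creation functions; the deleter it inherits destroys the handle on scope exit,
// much like std::unique_ptr. The names `device` and `bufferCreateInfo` are hypothetical:
//   vk::UniqueBuffer buffer = device.createBufferUnique( bufferCreateInfo );
//   vk::Buffer raw = buffer.get();   // non-owning handle, e.g. to fill a struct
//   // buffer.release() gives up ownership; the destructor then no longer destroys it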
  1050. template <typename UniqueType>
  1051. VULKAN_HPP_INLINE std::vector<typename UniqueType::element_type> uniqueToRaw(std::vector<UniqueType> const& handles)
  1052. {
  1053. std::vector<typename UniqueType::element_type> newBuffer(handles.size());
  1054. std::transform(handles.begin(), handles.end(), newBuffer.begin(), [](UniqueType const& handle) { return handle.get(); });
  1055. return newBuffer;
  1056. }
  1057. template <typename Type, typename Dispatch>
  1058. VULKAN_HPP_INLINE void swap( UniqueHandle<Type,Dispatch> & lhs, UniqueHandle<Type,Dispatch> & rhs ) VULKAN_HPP_NOEXCEPT
  1059. {
  1060. lhs.swap( rhs );
  1061. }
  1062. #endif
  1063. #if !defined(VK_NO_PROTOTYPES)
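// Note (illustrative comment): DispatchLoaderStatic is the default dispatcher when the Vulkan
// prototypes are available; each member simply forwards to the identically named function of
// the statically linked loader. A dispatcher can also be passed explicitly to any call, e.g.
// with a hypothetical `device`:
//   device.waitIdle( vk::DispatchLoaderStatic() );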
  1064. class DispatchLoaderStatic
  1065. {
  1066. public:
  1067. #ifdef VK_USE_PLATFORM_WIN32_KHR
  1068. VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
  1069. {
  1070. return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain );
  1071. }
  1072. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  1073. VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT
  1074. {
  1075. return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex );
  1076. }
  1077. VkResult vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex ) const VULKAN_HPP_NOEXCEPT
  1078. {
  1079. return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex );
  1080. }
  1081. VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration ) const VULKAN_HPP_NOEXCEPT
  1082. {
  1083. return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration );
  1084. }
  1085. VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
  1086. {
  1087. return ::vkAcquireProfilingLockKHR( device, pInfo );
  1088. }
  1089. #ifdef VK_USE_PLATFORM_WIN32_KHR
  1090. VkResult vkAcquireWinrtDisplayNV( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
  1091. {
  1092. return ::vkAcquireWinrtDisplayNV( physicalDevice, display );
  1093. }
  1094. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  1095. #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
  1096. VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
  1097. {
  1098. return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display );
  1099. }
  1100. #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
  1101. VkResult vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
  1102. {
  1103. return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers );
  1104. }
  1105. VkResult vkAllocateDescriptorSets( VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets ) const VULKAN_HPP_NOEXCEPT
  1106. {
  1107. return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets );
  1108. }
  1109. VkResult vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory ) const VULKAN_HPP_NOEXCEPT
  1110. {
  1111. return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory );
  1112. }
  1113. VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo ) const VULKAN_HPP_NOEXCEPT
  1114. {
  1115. return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo );
  1116. }
  1117. VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos ) const VULKAN_HPP_NOEXCEPT
  1118. {
  1119. return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos );
  1120. }
  1121. VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
  1122. {
  1123. return ::vkBindBufferMemory( device, buffer, memory, memoryOffset );
  1124. }
  1125. VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
  1126. {
  1127. return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos );
  1128. }
  1129. VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
  1130. {
  1131. return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos );
  1132. }
  1133. VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
  1134. {
  1135. return ::vkBindImageMemory( device, image, memory, memoryOffset );
  1136. }
  1137. VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
  1138. {
  1139. return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos );
  1140. }
  1141. VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos ) const VULKAN_HPP_NOEXCEPT
  1142. {
  1143. return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos );
  1144. }
  1145. VkResult vkBuildAccelerationStructuresKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT
  1146. {
  1147. return ::vkBuildAccelerationStructuresKHR( device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos );
  1148. }
  1149. void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT
  1150. {
  1151. return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin );
  1152. }
  1153. void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT
  1154. {
  1155. return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo );
  1156. }
  1157. void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT
  1158. {
  1159. return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags );
  1160. }
  1161. void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const VULKAN_HPP_NOEXCEPT
  1162. {
  1163. return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index );
  1164. }
  1165. void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT
  1166. {
  1167. return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents );
  1168. }
  1169. void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
  1170. {
  1171. return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo );
  1172. }
  1173. void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
  1174. {
  1175. return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo );
  1176. }
  1177. void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT
  1178. {
  1179. return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
  1180. }
  1181. void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT
  1182. {
  1183. return ::vkCmdBindDescriptorSets( commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets );
  1184. }
  1185. void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT
  1186. {
  1187. return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType );
  1188. }
  1189. void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT
  1190. {
  1191. return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline );
  1192. }
  1193. void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT
  1194. {
  1195. return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex );
  1196. }
  1197. void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
  1198. {
  1199. return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout );
  1200. }
  1201. void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes ) const VULKAN_HPP_NOEXCEPT
  1202. {
  1203. return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes );
  1204. }
  1205. void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets ) const VULKAN_HPP_NOEXCEPT
  1206. {
  1207. return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets );
  1208. }
  1209. void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides ) const VULKAN_HPP_NOEXCEPT
  1210. {
  1211. return ::vkCmdBindVertexBuffers2EXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides );
  1212. }
  1213. void vkCmdBlitImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter ) const VULKAN_HPP_NOEXCEPT
  1214. {
  1215. return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter );
  1216. }
  1217. void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, const VkBlitImageInfo2KHR* pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT
  1218. {
  1219. return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo );
  1220. }
  1221. void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT
  1222. {
  1223. return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset );
  1224. }
  1225. void vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkDeviceAddress* pIndirectDeviceAddresses, const uint32_t* pIndirectStrides, const uint32_t* const * ppMaxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT
  1226. {
  1227. return ::vkCmdBuildAccelerationStructuresIndirectKHR( commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts );
  1228. }
  1229. void vkCmdBuildAccelerationStructuresKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT
  1230. {
  1231. return ::vkCmdBuildAccelerationStructuresKHR( commandBuffer, infoCount, pInfos, ppBuildRangeInfos );
  1232. }
  1233. void vkCmdClearAttachments( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects ) const VULKAN_HPP_NOEXCEPT
  1234. {
  1235. return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects );
  1236. }
  1237. void vkCmdClearColorImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const VULKAN_HPP_NOEXCEPT
  1238. {
  1239. return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges );
  1240. }
  1241. void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges ) const VULKAN_HPP_NOEXCEPT
  1242. {
  1243. return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges );
  1244. }
void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo );
}
void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode );
}
void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo );
}
void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions );
}
void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2KHR* pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo );
}
void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions );
}
void vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2KHR* pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo );
}
void vkCmdCopyImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
}
void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, const VkCopyImageInfo2KHR* pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo );
}
void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions );
}
void vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2KHR* pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo );
}
void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo );
}
void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags );
}
void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo );
}
void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDebugMarkerEndEXT( commandBuffer );
}
void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo );
}
void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ );
}
void vkCmdDispatchBase( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
}
void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
}
void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset );
}
void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
}
void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
}
void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride );
}
void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
}
void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
}
void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
}
void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride );
}
void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride );
}
void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
}
void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
}
void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
}
void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
}
void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride );
}
void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask );
}
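// Usage sketch, not part of the generated header: assuming this class is the
// statically linked dispatcher that vulkan.hpp installs by default (commonly
// DispatchLoaderStatic), the C++ handle wrappers take it as a trailing argument
// and invoke the member of the same name, e.g.
//
//   VULKAN_HPP_NAMESPACE::DispatchLoaderStatic d;
//   d.vkCmdDraw( commandBuffer, 3, 1, 0, 0 );   // hypothetical VkCommandBuffer handle; forwards to ::vkCmdDraw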
void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdEndConditionalRenderingEXT( commandBuffer );
}
void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer );
}
void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdEndQuery( commandBuffer, queryPool, query );
}
void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index );
}
void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdEndRenderPass( commandBuffer );
}
void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo );
}
void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo );
}
void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
}
void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers );
}
void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo );
}
void vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data );
}
void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo );
}
void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdNextSubpass( commandBuffer, contents );
}
void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
}
void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
}
void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdPipelineBarrier( commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
}
void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo );
}
void vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues );
}
void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites );
}
void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData );
}
void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdResetEvent( commandBuffer, event, stageMask );
}
void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount );
}
void vkCmdResolveImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
}
void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, const VkResolveImageInfo2KHR* pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo );
}
void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetBlendConstants( commandBuffer, blendConstants );
}
void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void* pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker );
}
void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders );
}
void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetCullModeEXT( commandBuffer, cullMode );
}
void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
}
void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds );
}
void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable );
}
void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp );
}
void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable );
}
void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable );
}
void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetDeviceMask( commandBuffer, deviceMask );
}
void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask );
}
void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles );
}
void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetEvent( commandBuffer, event, stageMask );
}
void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors );
}
void vkCmdSetFragmentShadingRateEnumNV( VkCommandBuffer commandBuffer, VkFragmentShadingRateNV shadingRate, const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetFragmentShadingRateEnumNV( commandBuffer, shadingRate, combinerOps );
}
void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, const VkExtent2D* pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps );
}
void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace );
}
void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern );
}
void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetLineWidth( commandBuffer, lineWidth );
}
VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo );
}
VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo );
}
VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo );
}
void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology );
}
void vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetRayTracingPipelineStackSizeKHR( commandBuffer, pipelineStackSize );
}
void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo );
}
void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors );
}
void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors );
}
void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask );
}
void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp );
}
void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference );
}
void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable );
}
void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask );
}
void vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports );
}
void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes );
}
void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings );
}
void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports );
}
void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdTraceRaysIndirectKHR( commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, indirectDeviceAddress );
}
void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdTraceRaysKHR( commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth );
}
void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdTraceRaysNV( commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth );
}
void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData );
}
void vkCmdWaitEvents( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdWaitEvents( commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
}
void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdWriteAccelerationStructuresPropertiesKHR( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
}
void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdWriteAccelerationStructuresPropertiesNV( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
}
void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker );
}
void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query );
}
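// The wrappers from here on mirror the VkResult-returning device- and
// instance-level entry points. They pass the VkResult straight through; any
// translation to vk::Result or to exceptions presumably happens in the
// higher-level vulkan.hpp wrappers that call into this dispatcher, not here.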
VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCompileDeferredNV( device, pipeline, shader );
}
VkResult vkCopyAccelerationStructureKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCopyAccelerationStructureKHR( device, deferredOperation, pInfo );
}
VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCopyAccelerationStructureToMemoryKHR( device, deferredOperation, pInfo );
}
VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCopyMemoryToAccelerationStructureKHR( device, deferredOperation, pInfo );
}
VkResult vkCreateAccelerationStructureKHR( VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure );
}
VkResult vkCreateAccelerationStructureNV( VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure );
}
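// The surface-creation wrappers below are only compiled when the corresponding
// window-system macro (VK_USE_PLATFORM_ANDROID_KHR, VK_USE_PLATFORM_WIN32_KHR,
// VK_USE_PLATFORM_XCB_KHR, ...) is defined, matching the guards around the
// underlying C prototypes.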
#ifdef VK_USE_PLATFORM_ANDROID_KHR
VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
}
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
VkResult vkCreateBuffer( VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer );
}
VkResult vkCreateBufferView( VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView );
}
VkResult vkCreateCommandPool( VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool );
}
VkResult vkCreateComputePipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
}
VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback );
}
VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger );
}
VkResult vkCreateDeferredOperationKHR( VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation );
}
VkResult vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool );
}
VkResult vkCreateDescriptorSetLayout( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout );
}
VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
}
VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
}
VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice );
}
#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
}
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode );
}
VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
}
VkResult vkCreateEvent( VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent );
}
VkResult vkCreateFence( VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence );
}
VkResult vkCreateFramebuffer( VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer );
}
VkResult vkCreateGraphicsPipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
}
VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
}
#ifdef VK_USE_PLATFORM_IOS_MVK
VkResult vkCreateIOSSurfaceMVK( VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
}
#endif /*VK_USE_PLATFORM_IOS_MVK*/
VkResult vkCreateImage( VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage );
}
#ifdef VK_USE_PLATFORM_FUCHSIA
VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface );
}
#endif /*VK_USE_PLATFORM_FUCHSIA*/
VkResult vkCreateImageView( VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView );
}
VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout );
}
VkResult vkCreateInstance( const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance );
}
#ifdef VK_USE_PLATFORM_MACOS_MVK
VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
}
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
#ifdef VK_USE_PLATFORM_METAL_EXT
VkResult vkCreateMetalSurfaceEXT( VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
}
#endif /*VK_USE_PLATFORM_METAL_EXT*/
VkResult vkCreatePipelineCache( VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache );
}
VkResult vkCreatePipelineLayout( VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout );
}
VkResult vkCreatePrivateDataSlotEXT( VkDevice device, const VkPrivateDataSlotCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlotEXT* pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot );
}
VkResult vkCreateQueryPool( VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool );
}
VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateRayTracingPipelinesKHR( device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
}
VkResult vkCreateRayTracingPipelinesNV( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
}
VkResult vkCreateRenderPass( VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass );
}
VkResult vkCreateRenderPass2( VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass );
}
VkResult vkCreateRenderPass2KHR( VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass );
}
VkResult vkCreateSampler( VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler );
}
VkResult vkCreateSamplerYcbcrConversion( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion );
}
VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion );
}
VkResult vkCreateSemaphore( VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore );
}
VkResult vkCreateShaderModule( VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule );
}
VkResult vkCreateSharedSwapchainsKHR( VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains );
}
#ifdef VK_USE_PLATFORM_GGP
VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface );
}
#endif /*VK_USE_PLATFORM_GGP*/
VkResult vkCreateSwapchainKHR( VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain );
}
VkResult vkCreateValidationCacheEXT( VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache );
}
#ifdef VK_USE_PLATFORM_VI_NN
VkResult vkCreateViSurfaceNN( VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface );
}
#endif /*VK_USE_PLATFORM_VI_NN*/
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
}
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
VkResult vkCreateWin32SurfaceKHR( VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
}
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_XCB_KHR
VkResult vkCreateXcbSurfaceKHR( VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
}
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#ifdef VK_USE_PLATFORM_XLIB_KHR
VkResult vkCreateXlibSurfaceKHR( VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
}
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo );
}
VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo );
}
void vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage );
}
VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDeferredOperationJoinKHR( device, operation );
}
void vkDestroyAccelerationStructureKHR( VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator );
}
void vkDestroyAccelerationStructureNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator );
}
void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyBuffer( device, buffer, pAllocator );
}
void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyBufferView( device, bufferView, pAllocator );
}
void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyCommandPool( device, commandPool, pAllocator );
}
void vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator );
}
void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator );
}
void vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator );
}
void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator );
}
void vkDestroyDescriptorSetLayout( VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator );
}
void vkDestroyDescriptorUpdateTemplate( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator );
}
void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator );
}
void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyDevice( device, pAllocator );
}
void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyEvent( device, event, pAllocator );
}
void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyFence( device, fence, pAllocator );
}
void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyFramebuffer( device, framebuffer, pAllocator );
}
void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyImage( device, image, pAllocator );
}
void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyImageView( device, imageView, pAllocator );
}
void vkDestroyIndirectCommandsLayoutNV( VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator );
}
void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyInstance( instance, pAllocator );
}
void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyPipeline( device, pipeline, pAllocator );
}
void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator );
}
void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator );
}
void vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlotEXT privateDataSlot, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator );
}
void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyQueryPool( device, queryPool, pAllocator );
}
void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyRenderPass( device, renderPass, pAllocator );
}
void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroySampler( device, sampler, pAllocator );
}
void vkDestroySamplerYcbcrConversion( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator );
}
void vkDestroySamplerYcbcrConversionKHR( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator );
}
void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroySemaphore( device, semaphore, pAllocator );
}
void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyShaderModule( device, shaderModule, pAllocator );
}
void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroySurfaceKHR( instance, surface, pAllocator );
}
void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroySwapchainKHR( device, swapchain, pAllocator );
}
void vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator );
}
VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDeviceWaitIdle( device );
}
VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo );
}
VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkEndCommandBuffer( commandBuffer );
}
VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties );
}
VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties );
}
VkResult vkEnumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties );
}
VkResult vkEnumerateInstanceLayerProperties( uint32_t* pPropertyCount, VkLayerProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties );
}
VkResult vkEnumerateInstanceVersion( uint32_t* pApiVersion ) const VULKAN_HPP_NOEXCEPT
{
  return ::vkEnumerateInstanceVersion( pApiVersion );
}
  2101. VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
  2102. {
  2103. return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
  2104. }
  2105. VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
  2106. {
  2107. return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
  2108. }
  2109. VkResult vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT
  2110. {
  2111. return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions );
  2112. }
  2113. VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT
  2114. {
  2115. return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices );
  2116. }
  2117. VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
  2118. {
  2119. return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
  2120. }
  2121. void vkFreeCommandBuffers( VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
  2122. {
  2123. return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers );
  2124. }
  2125. VkResult vkFreeDescriptorSets( VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets ) const VULKAN_HPP_NOEXCEPT
  2126. {
  2127. return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets );
  2128. }
  2129. void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator ) const VULKAN_HPP_NOEXCEPT
  2130. {
  2131. return ::vkFreeMemory( device, memory, pAllocator );
  2132. }
  2133. void vkGetAccelerationStructureBuildSizesKHR( VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, const uint32_t* pMaxPrimitiveCounts, VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo ) const VULKAN_HPP_NOEXCEPT
  2134. {
  2135. return ::vkGetAccelerationStructureBuildSizesKHR( device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo );
  2136. }
  2137. VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo ) const VULKAN_HPP_NOEXCEPT
  2138. {
  2139. return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo );
  2140. }
  2141. VkResult vkGetAccelerationStructureHandleNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
  2142. {
  2143. return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData );
  2144. }
  2145. void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
  2146. {
  2147. return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements );
  2148. }
  2149. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  2150. VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties ) const VULKAN_HPP_NOEXCEPT
  2151. {
  2152. return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties );
  2153. }
  2154. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  2155. VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
  2156. {
  2157. return ::vkGetBufferDeviceAddress( device, pInfo );
  2158. }
  2159. VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
  2160. {
  2161. return ::vkGetBufferDeviceAddressEXT( device, pInfo );
  2162. }
  2163. VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
  2164. {
  2165. return ::vkGetBufferDeviceAddressKHR( device, pInfo );
  2166. }
  2167. void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
  2168. {
  2169. return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements );
  2170. }
  2171. void vkGetBufferMemoryRequirements2( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
  2172. {
  2173. return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements );
  2174. }
  2175. void vkGetBufferMemoryRequirements2KHR( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
  2176. {
  2177. return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements );
  2178. }
  2179. uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
  2180. {
  2181. return ::vkGetBufferOpaqueCaptureAddress( device, pInfo );
  2182. }
  2183. uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
  2184. {
  2185. return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo );
  2186. }
  2187. VkResult vkGetCalibratedTimestampsEXT( VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation ) const VULKAN_HPP_NOEXCEPT
  2188. {
  2189. return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation );
  2190. }
  2191. uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
  2192. {
  2193. return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation );
  2194. }
  2195. VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
  2196. {
  2197. return ::vkGetDeferredOperationResultKHR( device, operation );
  2198. }
  2199. void vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const VULKAN_HPP_NOEXCEPT
  2200. {
  2201. return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport );
  2202. }
  2203. void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport ) const VULKAN_HPP_NOEXCEPT
  2204. {
  2205. return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport );
  2206. }
  2207. void vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, const VkAccelerationStructureVersionInfoKHR* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility ) const VULKAN_HPP_NOEXCEPT
  2208. {
  2209. return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, pVersionInfo, pCompatibility );
  2210. }
  2211. void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
  2212. {
  2213. return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
  2214. }
  2215. void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
  2216. {
  2217. return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
  2218. }
  2219. VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT
  2220. {
  2221. return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities );
  2222. }
  2223. #ifdef VK_USE_PLATFORM_WIN32_KHR
  2224. VkResult vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes ) const VULKAN_HPP_NOEXCEPT
  2225. {
  2226. return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes );
  2227. }
  2228. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  2229. VkResult vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes ) const VULKAN_HPP_NOEXCEPT
  2230. {
  2231. return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes );
  2232. }
  2233. void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT
  2234. {
  2235. return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes );
  2236. }
  2237. uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
  2238. {
  2239. return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo );
  2240. }
  2241. uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo ) const VULKAN_HPP_NOEXCEPT
  2242. {
  2243. return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo );
  2244. }
  2245. PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char* pName ) const VULKAN_HPP_NOEXCEPT
  2246. {
  2247. return ::vkGetDeviceProcAddr( device, pName );
  2248. }
  2249. void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue ) const VULKAN_HPP_NOEXCEPT
  2250. {
  2251. return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue );
  2252. }
  2253. void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue ) const VULKAN_HPP_NOEXCEPT
  2254. {
  2255. return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue );
  2256. }
  2257. VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT
  2258. {
  2259. return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties );
  2260. }
  2261. VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
  2262. {
  2263. return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties );
  2264. }
  2265. VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT
  2266. {
  2267. return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities );
  2268. }
  2269. VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities ) const VULKAN_HPP_NOEXCEPT
  2270. {
  2271. return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities );
  2272. }
  2273. VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays ) const VULKAN_HPP_NOEXCEPT
  2274. {
  2275. return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays );
  2276. }
  2277. VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
  2278. {
  2279. return ::vkGetEventStatus( device, event );
  2280. }
  2281. VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT
  2282. {
  2283. return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd );
  2284. }
  2285. VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT
  2286. {
  2287. return ::vkGetFenceStatus( device, fence );
  2288. }
  2289. #ifdef VK_USE_PLATFORM_WIN32_KHR
  2290. VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT
  2291. {
  2292. return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
  2293. }
  2294. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  2295. void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
  2296. {
  2297. return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements );
  2298. }
  2299. VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties ) const VULKAN_HPP_NOEXCEPT
  2300. {
  2301. return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties );
  2302. }
  2303. void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
  2304. {
  2305. return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements );
  2306. }
  2307. void vkGetImageMemoryRequirements2( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
  2308. {
  2309. return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements );
  2310. }
  2311. void vkGetImageMemoryRequirements2KHR( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
  2312. {
  2313. return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements );
  2314. }
  2315. void vkGetImageSparseMemoryRequirements( VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
  2316. {
  2317. return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
  2318. }
  2319. void vkGetImageSparseMemoryRequirements2( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
  2320. {
  2321. return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
  2322. }
  2323. void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
  2324. {
  2325. return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
  2326. }
  2327. void vkGetImageSubresourceLayout( VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout ) const VULKAN_HPP_NOEXCEPT
  2328. {
  2329. return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout );
  2330. }
  2331. VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties ) const VULKAN_HPP_NOEXCEPT
  2332. {
  2333. return ::vkGetImageViewAddressNVX( device, imageView, pProperties );
  2334. }
  2335. uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX* pInfo ) const VULKAN_HPP_NOEXCEPT
  2336. {
  2337. return ::vkGetImageViewHandleNVX( device, pInfo );
  2338. }
  2339. PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char* pName ) const VULKAN_HPP_NOEXCEPT
  2340. {
  2341. return ::vkGetInstanceProcAddr( instance, pName );
  2342. }
  2343. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  2344. VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer ) const VULKAN_HPP_NOEXCEPT
  2345. {
  2346. return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer );
  2347. }
  2348. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  2349. VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT
  2350. {
  2351. return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd );
  2352. }
  2353. VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT
  2354. {
  2355. return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties );
  2356. }
  2357. VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT
  2358. {
  2359. return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties );
  2360. }
  2361. #ifdef VK_USE_PLATFORM_WIN32_KHR
  2362. VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT
  2363. {
  2364. return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
  2365. }
  2366. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  2367. #ifdef VK_USE_PLATFORM_WIN32_KHR
  2368. VkResult vkGetMemoryWin32HandleNV( VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT
  2369. {
  2370. return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle );
  2371. }
  2372. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  2373. #ifdef VK_USE_PLATFORM_WIN32_KHR
  2374. VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT
  2375. {
  2376. return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties );
  2377. }
  2378. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  2379. VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings ) const VULKAN_HPP_NOEXCEPT
  2380. {
  2381. return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings );
  2382. }
  2383. VkResult vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue ) const VULKAN_HPP_NOEXCEPT
  2384. {
  2385. return ::vkGetPerformanceParameterINTEL( device, parameter, pValue );
  2386. }
  2387. VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains ) const VULKAN_HPP_NOEXCEPT
  2388. {
  2389. return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains );
  2390. }
  2391. VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties ) const VULKAN_HPP_NOEXCEPT
  2392. {
  2393. return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties );
  2394. }
  2395. #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
  2396. VkBool32 vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB* dfb ) const VULKAN_HPP_NOEXCEPT
  2397. {
  2398. return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb );
  2399. }
  2400. #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
  2401. VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT
  2402. {
  2403. return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties );
  2404. }
  2405. VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
  2406. {
  2407. return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties );
  2408. }
  2409. VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties ) const VULKAN_HPP_NOEXCEPT
  2410. {
  2411. return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties );
  2412. }
  2413. VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
  2414. {
  2415. return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties );
  2416. }
  2417. void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
  2418. {
  2419. return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
  2420. }
  2421. void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
  2422. {
  2423. return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
  2424. }
  2425. void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
  2426. {
  2427. return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
  2428. }
  2429. void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
  2430. {
  2431. return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
  2432. }
  2433. VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
  2434. {
  2435. return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties );
  2436. }
  2437. void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
  2438. {
  2439. return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
  2440. }
  2441. void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
  2442. {
  2443. return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
  2444. }
  2445. void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures ) const VULKAN_HPP_NOEXCEPT
  2446. {
  2447. return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures );
  2448. }
  2449. void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const VULKAN_HPP_NOEXCEPT
  2450. {
  2451. return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures );
  2452. }
  2453. void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) const VULKAN_HPP_NOEXCEPT
  2454. {
  2455. return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures );
  2456. }
  2457. void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties ) const VULKAN_HPP_NOEXCEPT
  2458. {
  2459. return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties );
  2460. }
  2461. void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const VULKAN_HPP_NOEXCEPT
  2462. {
  2463. return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties );
  2464. }
  2465. void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties ) const VULKAN_HPP_NOEXCEPT
  2466. {
  2467. return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties );
  2468. }
  2469. VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR( VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT
  2470. {
  2471. return ::vkGetPhysicalDeviceFragmentShadingRatesKHR( physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates );
  2472. }
  2473. VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
  2474. {
  2475. return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties );
  2476. }
  2477. VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
  2478. {
  2479. return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties );
  2480. }
  2481. VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
  2482. {
  2483. return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties );
  2484. }
  2485. void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
  2486. {
  2487. return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties );
  2488. }
  2489. void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
  2490. {
  2491. return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties );
  2492. }
  2493. void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
  2494. {
  2495. return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties );
  2496. }
  2497. void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT
  2498. {
  2499. return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties );
  2500. }
  2501. VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects ) const VULKAN_HPP_NOEXCEPT
  2502. {
  2503. return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects );
  2504. }
  2505. void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
  2506. {
  2507. return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties );
  2508. }
  2509. void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
  2510. {
  2511. return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties );
  2512. }
  2513. void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
  2514. {
  2515. return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties );
  2516. }
  2517. void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses ) const VULKAN_HPP_NOEXCEPT
  2518. {
  2519. return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses );
  2520. }
  2521. void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
  2522. {
  2523. return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
  2524. }
  2525. void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
  2526. {
  2527. return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
  2528. }
  2529. void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
  2530. {
  2531. return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
  2532. }
  2533. void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties ) const VULKAN_HPP_NOEXCEPT
  2534. {
  2535. return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties );
  2536. }
  2537. void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
  2538. {
  2539. return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
  2540. }
  2541. void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties ) const VULKAN_HPP_NOEXCEPT
  2542. {
  2543. return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
  2544. }
  2545. VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations ) const VULKAN_HPP_NOEXCEPT
  2546. {
  2547. return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations );
  2548. }
  2549. VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
  2550. {
  2551. return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities );
  2552. }
  2553. VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
  2554. {
  2555. return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities );
  2556. }
  2557. VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
  2558. {
  2559. return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities );
  2560. }
  2561. VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
  2562. {
  2563. return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats );
  2564. }
  2565. VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
  2566. {
  2567. return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats );
  2568. }
  2569. #ifdef VK_USE_PLATFORM_WIN32_KHR
  2570. VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const VULKAN_HPP_NOEXCEPT
  2571. {
  2572. return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes );
  2573. }
  2574. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  2575. VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes ) const VULKAN_HPP_NOEXCEPT
  2576. {
  2577. return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes );
  2578. }
  2579. VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported ) const VULKAN_HPP_NOEXCEPT
  2580. {
  2581. return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported );
  2582. }
  2583. VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolPropertiesEXT* pToolProperties ) const VULKAN_HPP_NOEXCEPT
  2584. {
  2585. return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties );
  2586. }
  2587. #ifdef VK_USE_PLATFORM_WAYLAND_KHR
  2588. VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display ) const VULKAN_HPP_NOEXCEPT
  2589. {
  2590. return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display );
  2591. }
  2592. #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
  2593. #ifdef VK_USE_PLATFORM_WIN32_KHR
  2594. VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT
  2595. {
  2596. return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex );
  2597. }
  2598. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  2599. #ifdef VK_USE_PLATFORM_XCB_KHR
  2600. VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT
  2601. {
  2602. return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id );
  2603. }
  2604. #endif /*VK_USE_PLATFORM_XCB_KHR*/
  2605. #ifdef VK_USE_PLATFORM_XLIB_KHR
  2606. VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT
  2607. {
  2608. return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID );
  2609. }
  2610. #endif /*VK_USE_PLATFORM_XLIB_KHR*/
  2611. VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
  2612. {
  2613. return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData );
  2614. }
  2615. VkResult vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT
  2616. {
  2617. return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations );
  2618. }
  2619. VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties ) const VULKAN_HPP_NOEXCEPT
  2620. {
  2621. return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties );
  2622. }
  2623. VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics ) const VULKAN_HPP_NOEXCEPT
  2624. {
  2625. return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics );
  2626. }
  2627. void vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t* pData ) const VULKAN_HPP_NOEXCEPT
  2628. {
  2629. return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData );
  2630. }
  2631. VkResult vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
  2632. {
  2633. return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags );
  2634. }
  2635. void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData ) const VULKAN_HPP_NOEXCEPT
  2636. {
  2637. return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData );
  2638. }
  2639. #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
  2640. VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay ) const VULKAN_HPP_NOEXCEPT
  2641. {
  2642. return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay );
  2643. }
  2644. #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
  2645. VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
  2646. {
  2647. return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData );
  2648. }
  2649. VkResult vkGetRayTracingShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
  2650. {
  2651. return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData );
  2652. }
  2653. VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
  2654. {
  2655. return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData );
  2656. }
  2657. VkDeviceSize vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device, VkPipeline pipeline, uint32_t group, VkShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT
  2658. {
  2659. return ::vkGetRayTracingShaderGroupStackSizeKHR( device, pipeline, group, groupShader );
  2660. }
  2661. VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT
  2662. {
  2663. return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties );
  2664. }
  2665. void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity ) const VULKAN_HPP_NOEXCEPT
  2666. {
  2667. return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity );
  2668. }
  2669. VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t* pValue ) const VULKAN_HPP_NOEXCEPT
  2670. {
  2671. return ::vkGetSemaphoreCounterValue( device, semaphore, pValue );
  2672. }
  2673. VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t* pValue ) const VULKAN_HPP_NOEXCEPT
  2674. {
  2675. return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue );
  2676. }
  2677. VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd ) const VULKAN_HPP_NOEXCEPT
  2678. {
  2679. return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd );
  2680. }
  2681. #ifdef VK_USE_PLATFORM_WIN32_KHR
  2682. VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle ) const VULKAN_HPP_NOEXCEPT
  2683. {
  2684. return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
  2685. }
  2686. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  2687. VkResult vkGetShaderInfoAMD( VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo ) const VULKAN_HPP_NOEXCEPT
  2688. {
  2689. return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo );
  2690. }
  2691. VkResult vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue ) const VULKAN_HPP_NOEXCEPT
  2692. {
  2693. return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue );
  2694. }
  2695. VkResult vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages ) const VULKAN_HPP_NOEXCEPT
  2696. {
  2697. return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages );
  2698. }
  2699. VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
  2700. {
  2701. return ::vkGetSwapchainStatusKHR( device, swapchain );
  2702. }
  2703. VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData ) const VULKAN_HPP_NOEXCEPT
  2704. {
  2705. return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData );
  2706. }
  2707. #ifdef VK_USE_PLATFORM_WIN32_KHR
  2708. VkResult vkGetWinrtDisplayNV( VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR* pDisplay ) const VULKAN_HPP_NOEXCEPT
  2709. {
  2710. return ::vkGetWinrtDisplayNV( physicalDevice, deviceRelativeId, pDisplay );
  2711. }
  2712. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  2713. VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT
  2714. {
  2715. return ::vkImportFenceFdKHR( device, pImportFenceFdInfo );
  2716. }
  2717. #ifdef VK_USE_PLATFORM_WIN32_KHR
  2718. VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
  2719. {
  2720. return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo );
  2721. }
  2722. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  2723. VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT
  2724. {
  2725. return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo );
  2726. }
  2727. #ifdef VK_USE_PLATFORM_WIN32_KHR
  2728. VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
  2729. {
  2730. return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo );
  2731. }
  2732. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  2733. VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo ) const VULKAN_HPP_NOEXCEPT
  2734. {
  2735. return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo );
  2736. }
  2737. VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
  2738. {
  2739. return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
  2740. }
  2741. VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData ) const VULKAN_HPP_NOEXCEPT
  2742. {
  2743. return ::vkMapMemory( device, memory, offset, size, flags, ppData );
  2744. }
  2745. VkResult vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches ) const VULKAN_HPP_NOEXCEPT
  2746. {
  2747. return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches );
  2748. }
  2749. VkResult vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches ) const VULKAN_HPP_NOEXCEPT
  2750. {
  2751. return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches );
  2752. }
  2753. void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT
  2754. {
  2755. return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo );
  2756. }
  2757. VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT
  2758. {
  2759. return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence );
  2760. }
  2761. void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT
  2762. {
  2763. return ::vkQueueEndDebugUtilsLabelEXT( queue );
  2764. }
  2765. void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) const VULKAN_HPP_NOEXCEPT
  2766. {
  2767. return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo );
  2768. }
  2769. VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR* pPresentInfo ) const VULKAN_HPP_NOEXCEPT
  2770. {
  2771. return ::vkQueuePresentKHR( queue, pPresentInfo );
  2772. }
  2773. VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
  2774. {
  2775. return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration );
  2776. }
  2777. VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT
  2778. {
  2779. return ::vkQueueSubmit( queue, submitCount, pSubmits, fence );
  2780. }
  2781. VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT
  2782. {
  2783. return ::vkQueueWaitIdle( queue );
  2784. }
  2785. VkResult vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT
  2786. {
  2787. return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence );
  2788. }
  2789. VkResult vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence ) const VULKAN_HPP_NOEXCEPT
  2790. {
  2791. return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence );
  2792. }
  2793. VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
  2794. {
  2795. return ::vkReleaseDisplayEXT( physicalDevice, display );
  2796. }
  2797. #ifdef VK_USE_PLATFORM_WIN32_KHR
  2798. VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
  2799. {
  2800. return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain );
  2801. }
  2802. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  2803. VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
  2804. {
  2805. return ::vkReleasePerformanceConfigurationINTEL( device, configuration );
  2806. }
  2807. void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT
  2808. {
  2809. return ::vkReleaseProfilingLockKHR( device );
  2810. }
  2811. VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT
  2812. {
  2813. return ::vkResetCommandBuffer( commandBuffer, flags );
  2814. }
  2815. VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
  2816. {
  2817. return ::vkResetCommandPool( device, commandPool, flags );
  2818. }
  2819. VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
  2820. {
  2821. return ::vkResetDescriptorPool( device, descriptorPool, flags );
  2822. }
  2823. VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
  2824. {
  2825. return ::vkResetEvent( device, event );
  2826. }
  2827. VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences ) const VULKAN_HPP_NOEXCEPT
  2828. {
  2829. return ::vkResetFences( device, fenceCount, pFences );
  2830. }
  2831. void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
  2832. {
  2833. return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount );
  2834. }
  2835. void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
  2836. {
  2837. return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount );
  2838. }
  2839. VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo ) const VULKAN_HPP_NOEXCEPT
  2840. {
  2841. return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo );
  2842. }
  2843. VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo ) const VULKAN_HPP_NOEXCEPT
  2844. {
  2845. return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo );
  2846. }
  2847. VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
  2848. {
  2849. return ::vkSetEvent( device, event );
  2850. }
  2851. void vkSetHdrMetadataEXT( VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata ) const VULKAN_HPP_NOEXCEPT
  2852. {
  2853. return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata );
  2854. }
  2855. void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT
  2856. {
  2857. return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable );
  2858. }
  2859. VkResult vkSetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlotEXT privateDataSlot, uint64_t data ) const VULKAN_HPP_NOEXCEPT
  2860. {
  2861. return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data );
  2862. }
  2863. VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo ) const VULKAN_HPP_NOEXCEPT
  2864. {
  2865. return ::vkSignalSemaphore( device, pSignalInfo );
  2866. }
  2867. VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo ) const VULKAN_HPP_NOEXCEPT
  2868. {
  2869. return ::vkSignalSemaphoreKHR( device, pSignalInfo );
  2870. }
  2871. void vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData ) const VULKAN_HPP_NOEXCEPT
  2872. {
  2873. return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData );
  2874. }
  2875. void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
  2876. {
  2877. return ::vkTrimCommandPool( device, commandPool, flags );
  2878. }
  2879. void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
  2880. {
  2881. return ::vkTrimCommandPoolKHR( device, commandPool, flags );
  2882. }
  2883. void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT
  2884. {
  2885. return ::vkUninitializePerformanceApiINTEL( device );
  2886. }
  2887. void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT
  2888. {
  2889. return ::vkUnmapMemory( device, memory );
  2890. }
  2891. void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT
  2892. {
  2893. return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData );
  2894. }
  2895. void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData ) const VULKAN_HPP_NOEXCEPT
  2896. {
  2897. return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData );
  2898. }
  2899. void vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT
  2900. {
  2901. return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies );
  2902. }
  2903. VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
  2904. {
  2905. return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout );
  2906. }
  2907. VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
  2908. {
  2909. return ::vkWaitSemaphores( device, pWaitInfo, timeout );
  2910. }
  2911. VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
  2912. {
  2913. return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout );
  2914. }
  2915. VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride ) const VULKAN_HPP_NOEXCEPT
  2916. {
  2917. return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride );
  2918. }
  2919. };
  2920. #endif
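// --- Editor's note -------------------------------------------------------
// Illustrative sketch, not part of vulkan.hpp: the static dispatcher class
// closed above (DispatchLoaderStatic, cf. VULKAN_HPP_DEFAULT_DISPATCHER
// below) simply forwards each member to the identically named C entry point
// linked in from the Vulkan loader, so it can be passed wherever a Dispatch
// parameter is expected. Assuming a valid VkDevice and VkFence:
#if 0
#include <vulkan/vulkan.hpp>

void waitAndResetFence( VkDevice device, VkFence fence )
{
  VULKAN_HPP_NAMESPACE::DispatchLoaderStatic d;                   // stateless, cheap to construct
  d.vkWaitForFences( device, 1, &fence, VK_TRUE, UINT64_MAX );    // forwards to ::vkWaitForFences
  d.vkResetFences( device, 1, &fence );                           // forwards to ::vkResetFences
}
#endif
// -------------------------------------------------------------------------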
  2921. class DispatchLoaderDynamic;
  2922. #if !defined(VULKAN_HPP_DISPATCH_LOADER_DYNAMIC)
  2923. # if defined(VK_NO_PROTOTYPES)
  2924. # define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1
  2925. # else
  2926. # define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 0
  2927. # endif
  2928. #endif
  2929. #if !defined( VULKAN_HPP_STORAGE_API )
  2930. # if defined( VULKAN_HPP_STORAGE_SHARED )
  2931. # if defined( _MSC_VER )
  2932. # if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT )
  2933. # define VULKAN_HPP_STORAGE_API __declspec( dllexport )
  2934. # else
  2935. # define VULKAN_HPP_STORAGE_API __declspec( dllimport )
  2936. # endif
  2937. # elif defined( __clang__ ) || defined( __GNUC__ )
  2938. # if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT )
  2939. # define VULKAN_HPP_STORAGE_API __attribute__( ( visibility( "default" ) ) )
  2940. # else
  2941. # define VULKAN_HPP_STORAGE_API
  2942. # endif
  2943. # else
  2944. # define VULKAN_HPP_STORAGE_API
  2945. # pragma warning Unknown import / export semantics
  2946. # endif
  2947. # else
  2948. # define VULKAN_HPP_STORAGE_API
  2949. # endif
  2950. #endif
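// Editor's note (illustrative, not normative): VULKAN_HPP_STORAGE_SHARED and
// VULKAN_HPP_STORAGE_SHARED_EXPORT are intended for the case where the
// default dynamic dispatcher lives in a shared library. A typical build
// might pass -DVULKAN_HPP_STORAGE_SHARED -DVULKAN_HPP_STORAGE_SHARED_EXPORT
// when compiling that library and only -DVULKAN_HPP_STORAGE_SHARED in its
// users, so that VULKAN_HPP_STORAGE_API expands to the matching export or
// import attribute on each side.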
  2951. #if !defined(VULKAN_HPP_DEFAULT_DISPATCHER)
  2952. # if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
  2953. # define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic
  2954. # define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE namespace VULKAN_HPP_NAMESPACE { VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; }
  2955. extern VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic;
  2956. # else
  2957. # define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic()
  2958. # define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE
  2959. # endif
  2960. #endif
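// --- Editor's note -------------------------------------------------------
// Illustrative sketch, not part of vulkan.hpp, assuming the default vk
// namespace and the DynamicLoader / DispatchLoaderDynamic helpers defined
// elsewhere in this header: when VULKAN_HPP_DISPATCH_LOADER_DYNAMIC is 1,
// exactly one translation unit has to instantiate the storage for the
// default dispatcher and initialize it before use, along the lines of:
#if 0
#include <vulkan/vulkan.hpp>

VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE   // defines defaultDispatchLoaderDynamic

int main()
{
  vk::DynamicLoader dl;   // loads the Vulkan library at runtime
  auto getInstanceProcAddr = dl.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
  VULKAN_HPP_DEFAULT_DISPATCHER.init( getInstanceProcAddr );      // global + instance-independent functions

  vk::Instance instance = vk::createInstance( vk::InstanceCreateInfo{} );
  VULKAN_HPP_DEFAULT_DISPATCHER.init( instance );                 // instance-level functions
  // ...create a device, then call init( device ) once more for device-level functions
}
#endif
// -------------------------------------------------------------------------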
  2961. #if !defined(VULKAN_HPP_DEFAULT_DISPATCHER_TYPE)
  2962. # if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
  2963. #define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic
  2964. # else
  2965. # define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic
  2966. # endif
  2967. #endif
  2968. #if defined( VULKAN_HPP_NO_DEFAULT_DISPATCHER )
  2969. # define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT
  2970. # define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT
  2971. # define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT
  2972. #else
  2973. # define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT = {}
  2974. # define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT = nullptr
  2975. # define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT = VULKAN_HPP_DEFAULT_DISPATCHER
  2976. #endif
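// Editor's note (hypothetical signature, for illustration only): the three
// macros above are used throughout the generated handle classes so that, for
// example, a destroy() member can be declared roughly as
//   void destroy( Optional<const AllocationCallbacks> allocator
//                   VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
//                 Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
// With VULKAN_HPP_NO_DEFAULT_DISPATCHER defined, the default arguments vanish
// and both parameters must be passed explicitly at every call site.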
  2977. struct AllocationCallbacks;
  2978. template <typename OwnerType, typename Dispatch>
  2979. class ObjectDestroy
  2980. {
  2981. public:
  2982. ObjectDestroy() = default;
  2983. ObjectDestroy( OwnerType owner,
  2984. Optional<const AllocationCallbacks> allocationCallbacks
  2985. VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
  2986. Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
  2987. : m_owner( owner )
  2988. , m_allocationCallbacks( allocationCallbacks )
  2989. , m_dispatch( &dispatch )
  2990. {}
  2991. OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; }
  2992. Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; }
  2993. protected:
  2994. template <typename T>
  2995. void destroy(T t) VULKAN_HPP_NOEXCEPT
  2996. {
  2997. VULKAN_HPP_ASSERT( m_owner && m_dispatch );
  2998. m_owner.destroy( t, m_allocationCallbacks, *m_dispatch );
  2999. }
  3000. private:
  3001. OwnerType m_owner = {};
  3002. Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
  3003. Dispatch const * m_dispatch = nullptr;
  3004. };
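// --- Editor's note -------------------------------------------------------
// Illustrative sketch, not part of vulkan.hpp: ObjectDestroy is the deleter
// used by owner-destroyed unique handles; destroy( t ) forwards to
// m_owner.destroy( t, allocationCallbacks, dispatch ). Assuming the unique
// handle types and createXxxUnique functions defined later in this header,
// usage looks roughly like:
#if 0
void scopedFence( vk::Device device )
{
  // deleter is ObjectDestroy<vk::Device, Dispatch>, with the device as owner
  vk::UniqueFence fence = device.createFenceUnique( vk::FenceCreateInfo{} );
}   // ~UniqueFence -> ObjectDestroy::destroy -> device.destroy( fence, nullptr, dispatch )
#endif
// -------------------------------------------------------------------------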
  3005. class NoParent;
  3006. template <typename Dispatch>
  3007. class ObjectDestroy<NoParent,Dispatch>
  3008. {
  3009. public:
  3010. ObjectDestroy() = default;
  3011. ObjectDestroy( Optional<const AllocationCallbacks> allocationCallbacks,
  3012. Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
  3013. : m_allocationCallbacks( allocationCallbacks )
  3014. , m_dispatch( &dispatch )
  3015. {}
  3016. Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; }
  3017. protected:
  3018. template <typename T>
  3019. void destroy(T t) VULKAN_HPP_NOEXCEPT
  3020. {
  3021. VULKAN_HPP_ASSERT( m_dispatch );
  3022. t.destroy( m_allocationCallbacks, *m_dispatch );
  3023. }
  3024. private:
  3025. Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
  3026. Dispatch const * m_dispatch = nullptr;
  3027. };
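// Editor's note (illustration): the NoParent specialization above is the
// deleter for handles that have no owning object, i.e. Instance and Device
// themselves; its destroy( t ) calls t.destroy( allocationCallbacks,
// dispatch ), which is what vk::UniqueInstance and vk::UniqueDevice end up
// invoking on destruction.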
  3028. template <typename OwnerType, typename Dispatch>
  3029. class ObjectFree
  3030. {
  3031. public:
  3032. ObjectFree() = default;
  3033. ObjectFree( OwnerType owner,
  3034. Optional<const AllocationCallbacks> allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
  3035. Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
  3036. : m_owner( owner )
  3037. , m_allocationCallbacks( allocationCallbacks )
  3038. , m_dispatch( &dispatch )
  3039. {}
  3040. OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
  3041. {
  3042. return m_owner;
  3043. }
  3044. Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT
  3045. {
  3046. return m_allocationCallbacks;
  3047. }
  3048. protected:
  3049. template <typename T>
  3050. void destroy( T t ) VULKAN_HPP_NOEXCEPT
  3051. {
  3052. VULKAN_HPP_ASSERT( m_owner && m_dispatch );
  3053. m_owner.free( t, m_allocationCallbacks, *m_dispatch );
  3054. }
  3055. private:
  3056. OwnerType m_owner = {};
  3057. Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
  3058. Dispatch const * m_dispatch = nullptr;
  3059. };
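// Editor's note (illustrative sketch): ObjectFree parallels ObjectDestroy but
// calls m_owner.free( t, ... ) instead of destroy, so it backs handles that
// are "freed" rather than "destroyed", most notably device memory. Assuming
// the unique handle types defined later in this header:
#if 0
vk::UniqueDeviceMemory allocateScoped( vk::Device device, vk::MemoryAllocateInfo const & allocInfo )
{
  return device.allocateMemoryUnique( allocInfo );
  // ~UniqueDeviceMemory -> ObjectFree::destroy -> device.free( memory, nullptr, dispatch )
}
#endif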
  3060. template <typename OwnerType, typename Dispatch>
  3061. class ObjectRelease
  3062. {
  3063. public:
  3064. ObjectRelease() = default;
  3065. ObjectRelease( OwnerType owner, Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
  3066. : m_owner( owner )
  3067. , m_dispatch( &dispatch )
  3068. {}
  3069. OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
  3070. {
  3071. return m_owner;
  3072. }
  3073. protected:
  3074. template <typename T>
  3075. void destroy( T t ) VULKAN_HPP_NOEXCEPT
  3076. {
  3077. VULKAN_HPP_ASSERT( m_owner && m_dispatch );
  3078. m_owner.release( t, *m_dispatch );
  3079. }
  3080. private:
  3081. OwnerType m_owner = {};
  3082. Dispatch const * m_dispatch = nullptr;
  3083. };
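// Editor's note (illustration, hedged): ObjectRelease is the deleter variant
// for handles whose owner exposes release() rather than destroy() or free(),
// without allocation callbacks; a plausible example is the INTEL performance
// configuration handle, which is given back through the
// vkReleasePerformanceConfigurationINTEL entry point listed earlier.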
  3084. template <typename OwnerType, typename PoolType, typename Dispatch>
  3085. class PoolFree
  3086. {
  3087. public:
  3088. PoolFree() = default;
  3089. PoolFree( OwnerType owner,
  3090. PoolType pool,
  3091. Dispatch const & dispatch = VULKAN_HPP_DEFAULT_DISPATCHER ) VULKAN_HPP_NOEXCEPT
  3092. : m_owner( owner )
  3093. , m_pool( pool )
  3094. , m_dispatch( &dispatch )
  3095. {}
  3096. OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; }
  3097. PoolType getPool() const VULKAN_HPP_NOEXCEPT { return m_pool; }
  3098. protected:
  3099. template <typename T>
  3100. void destroy(T t) VULKAN_HPP_NOEXCEPT
  3101. {
  3102. m_owner.free( m_pool, t, *m_dispatch );
  3103. }
  3104. private:
  3105. OwnerType m_owner = OwnerType();
  3106. PoolType m_pool = PoolType();
  3107. Dispatch const * m_dispatch = nullptr;
  3108. };
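// --- Editor's note -------------------------------------------------------
// Illustrative sketch, not part of vulkan.hpp: PoolFree is the deleter for
// handles allocated out of a pool; destroy( t ) gives the object back to the
// pool via m_owner.free( m_pool, t, dispatch ). Assuming the unique handle
// types defined later in this header, a command-buffer example might look
// like:
#if 0
std::vector<vk::UniqueCommandBuffer> allocateScoped( vk::Device device, vk::CommandPool pool )
{
  return device.allocateCommandBuffersUnique(
    vk::CommandBufferAllocateInfo( pool, vk::CommandBufferLevel::ePrimary, 2 ) );
}
// each ~UniqueCommandBuffer -> PoolFree::destroy -> device.free( pool, commandBuffer, dispatch )
#endif
// -------------------------------------------------------------------------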
  3109. using Bool32 = uint32_t;
  3110. using DeviceAddress = uint64_t;
  3111. using DeviceSize = uint64_t;
  3112. using SampleMask = uint32_t;
  3113. template <typename EnumType, EnumType value>
  3114. struct CppType
  3115. {};
  3116. template <typename Type>
  3117. struct isVulkanHandleType
  3118. {
  3119. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = false;
  3120. };
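// Editor's note (illustrative sketch): CppType and isVulkanHandleType are
// trait templates; the primary template above defaults to false, and the
// handle classes defined further down in this header are expected to provide
// the true specializations. Assuming those specializations, user code can
// query the trait directly:
#if 0
static_assert( vk::isVulkanHandleType<vk::Device>::value, "Device is a Vulkan handle type" );
static_assert( !vk::isVulkanHandleType<int>::value, "int is not a Vulkan handle type" );
#endif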
  3121. VULKAN_HPP_INLINE std::string toHexString( uint32_t value )
  3122. {
  3123. std::stringstream stream;
  3124. stream << std::hex << value;
  3125. return stream.str();
  3126. }
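// Editor's note: toHexString simply streams the value with std::hex, so it
// produces lowercase hex digits without a "0x" prefix or zero padding, e.g.
// toHexString( 48 ) == "30" and toHexString( 0x1A ) == "1a". It is used by
// the to_string overloads below to print unrecognized enum values.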
  3127. enum class AccelerationStructureBuildTypeKHR
  3128. {
  3129. eHost = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR,
  3130. eDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR,
  3131. eHostOrDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR};
  3132. VULKAN_HPP_INLINE std::string to_string( AccelerationStructureBuildTypeKHR value )
  3133. {
  3134. switch ( value )
  3135. {
  3136. case AccelerationStructureBuildTypeKHR::eHost : return "Host";
  3137. case AccelerationStructureBuildTypeKHR::eDevice : return "Device";
  3138. case AccelerationStructureBuildTypeKHR::eHostOrDevice : return "HostOrDevice";
  3139. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3140. }
  3141. }
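// --- Editor's note -------------------------------------------------------
// Illustrative sketch, not part of vulkan.hpp: every enum in this header gets
// a to_string overload of this shape, so logging an enum value is just a
// function call; values outside the known enumerators fall into the default
// branch and are printed in hex:
#if 0
#include <iostream>
#include <vulkan/vulkan.hpp>

int main()
{
  std::cout << vk::to_string( vk::AccelerationStructureBuildTypeKHR::eDevice ) << '\n';            // "Device"
  std::cout << vk::to_string( static_cast<vk::AccelerationStructureBuildTypeKHR>( 42 ) ) << '\n';  // "invalid ( 2a )"
}
#endif
// -------------------------------------------------------------------------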
  3142. enum class AccelerationStructureCompatibilityKHR
  3143. {
  3144. eCompatible = VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR,
  3145. eIncompatible = VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR};
  3146. VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCompatibilityKHR value )
  3147. {
  3148. switch ( value )
  3149. {
  3150. case AccelerationStructureCompatibilityKHR::eCompatible : return "Compatible";
  3151. case AccelerationStructureCompatibilityKHR::eIncompatible : return "Incompatible";
  3152. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3153. }
  3154. }
  3155. enum class AccelerationStructureCreateFlagBitsKHR : VkAccelerationStructureCreateFlagsKHR
  3156. {
  3157. eDeviceAddressCaptureReplay = VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR};
  3158. VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCreateFlagBitsKHR value )
  3159. {
  3160. switch ( value )
  3161. {
  3162. case AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay : return "DeviceAddressCaptureReplay";
  3163. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3164. }
  3165. }
  3166. enum class AccelerationStructureMemoryRequirementsTypeNV
  3167. {
  3168. eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV,
  3169. eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV,
  3170. eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV};
  3171. VULKAN_HPP_INLINE std::string to_string( AccelerationStructureMemoryRequirementsTypeNV value )
  3172. {
  3173. switch ( value )
  3174. {
  3175. case AccelerationStructureMemoryRequirementsTypeNV::eObject : return "Object";
  3176. case AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch : return "BuildScratch";
  3177. case AccelerationStructureMemoryRequirementsTypeNV::eUpdateScratch : return "UpdateScratch";
  3178. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3179. }
  3180. }
  3181. enum class AccelerationStructureTypeKHR
  3182. {
  3183. eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR,
  3184. eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR,
  3185. eGeneric = VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR};
  3186. using AccelerationStructureTypeNV = AccelerationStructureTypeKHR;
  3187. VULKAN_HPP_INLINE std::string to_string( AccelerationStructureTypeKHR value )
  3188. {
  3189. switch ( value )
  3190. {
  3191. case AccelerationStructureTypeKHR::eTopLevel : return "TopLevel";
  3192. case AccelerationStructureTypeKHR::eBottomLevel : return "BottomLevel";
  3193. case AccelerationStructureTypeKHR::eGeneric : return "Generic";
  3194. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3195. }
  3196. }
  3197. enum class AccessFlagBits : VkAccessFlags
  3198. {
  3199. eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
  3200. eIndexRead = VK_ACCESS_INDEX_READ_BIT,
  3201. eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
  3202. eUniformRead = VK_ACCESS_UNIFORM_READ_BIT,
  3203. eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
  3204. eShaderRead = VK_ACCESS_SHADER_READ_BIT,
  3205. eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT,
  3206. eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
  3207. eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
  3208. eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
  3209. eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
  3210. eTransferRead = VK_ACCESS_TRANSFER_READ_BIT,
  3211. eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT,
  3212. eHostRead = VK_ACCESS_HOST_READ_BIT,
  3213. eHostWrite = VK_ACCESS_HOST_WRITE_BIT,
  3214. eMemoryRead = VK_ACCESS_MEMORY_READ_BIT,
  3215. eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT,
  3216. eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT,
  3217. eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT,
  3218. eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT,
  3219. eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT,
  3220. eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
  3221. eAccelerationStructureReadKHR = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR,
  3222. eAccelerationStructureWriteKHR = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
  3223. eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV,
  3224. eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT,
  3225. eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV,
  3226. eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV,
  3227. eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV,
  3228. eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
  3229. eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR};
  3230. VULKAN_HPP_INLINE std::string to_string( AccessFlagBits value )
  3231. {
  3232. switch ( value )
  3233. {
  3234. case AccessFlagBits::eIndirectCommandRead : return "IndirectCommandRead";
  3235. case AccessFlagBits::eIndexRead : return "IndexRead";
  3236. case AccessFlagBits::eVertexAttributeRead : return "VertexAttributeRead";
  3237. case AccessFlagBits::eUniformRead : return "UniformRead";
  3238. case AccessFlagBits::eInputAttachmentRead : return "InputAttachmentRead";
  3239. case AccessFlagBits::eShaderRead : return "ShaderRead";
  3240. case AccessFlagBits::eShaderWrite : return "ShaderWrite";
  3241. case AccessFlagBits::eColorAttachmentRead : return "ColorAttachmentRead";
  3242. case AccessFlagBits::eColorAttachmentWrite : return "ColorAttachmentWrite";
  3243. case AccessFlagBits::eDepthStencilAttachmentRead : return "DepthStencilAttachmentRead";
  3244. case AccessFlagBits::eDepthStencilAttachmentWrite : return "DepthStencilAttachmentWrite";
  3245. case AccessFlagBits::eTransferRead : return "TransferRead";
  3246. case AccessFlagBits::eTransferWrite : return "TransferWrite";
  3247. case AccessFlagBits::eHostRead : return "HostRead";
  3248. case AccessFlagBits::eHostWrite : return "HostWrite";
  3249. case AccessFlagBits::eMemoryRead : return "MemoryRead";
  3250. case AccessFlagBits::eMemoryWrite : return "MemoryWrite";
  3251. case AccessFlagBits::eTransformFeedbackWriteEXT : return "TransformFeedbackWriteEXT";
  3252. case AccessFlagBits::eTransformFeedbackCounterReadEXT : return "TransformFeedbackCounterReadEXT";
  3253. case AccessFlagBits::eTransformFeedbackCounterWriteEXT : return "TransformFeedbackCounterWriteEXT";
  3254. case AccessFlagBits::eConditionalRenderingReadEXT : return "ConditionalRenderingReadEXT";
  3255. case AccessFlagBits::eColorAttachmentReadNoncoherentEXT : return "ColorAttachmentReadNoncoherentEXT";
  3256. case AccessFlagBits::eAccelerationStructureReadKHR : return "AccelerationStructureReadKHR";
  3257. case AccessFlagBits::eAccelerationStructureWriteKHR : return "AccelerationStructureWriteKHR";
  3258. case AccessFlagBits::eShadingRateImageReadNV : return "ShadingRateImageReadNV";
  3259. case AccessFlagBits::eFragmentDensityMapReadEXT : return "FragmentDensityMapReadEXT";
  3260. case AccessFlagBits::eCommandPreprocessReadNV : return "CommandPreprocessReadNV";
  3261. case AccessFlagBits::eCommandPreprocessWriteNV : return "CommandPreprocessWriteNV";
  3262. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3263. }
  3264. }
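// Editor's note: several enumerators above (eAccelerationStructureReadNV,
// eAccelerationStructureWriteNV, eFragmentShadingRateAttachmentReadKHR) are
// aliases that share their numeric value with an enumerator that already has
// a case, so they cannot appear again in the switch; the same pattern
// accounts for the seemingly missing cases in later enums such as
// BufferCreateFlagBits, BufferUsageFlagBits and ColorSpaceKHR.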
  3265. enum class AcquireProfilingLockFlagBitsKHR : VkAcquireProfilingLockFlagsKHR
  3266. {};
  3267. VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagBitsKHR )
  3268. {
  3269. return "(void)";
  3270. }
  3271. enum class AttachmentDescriptionFlagBits : VkAttachmentDescriptionFlags
  3272. {
  3273. eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT};
  3274. VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlagBits value )
  3275. {
  3276. switch ( value )
  3277. {
  3278. case AttachmentDescriptionFlagBits::eMayAlias : return "MayAlias";
  3279. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3280. }
  3281. }
  3282. enum class AttachmentLoadOp
  3283. {
  3284. eLoad = VK_ATTACHMENT_LOAD_OP_LOAD,
  3285. eClear = VK_ATTACHMENT_LOAD_OP_CLEAR,
  3286. eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE};
  3287. VULKAN_HPP_INLINE std::string to_string( AttachmentLoadOp value )
  3288. {
  3289. switch ( value )
  3290. {
  3291. case AttachmentLoadOp::eLoad : return "Load";
  3292. case AttachmentLoadOp::eClear : return "Clear";
  3293. case AttachmentLoadOp::eDontCare : return "DontCare";
  3294. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3295. }
  3296. }
  3297. enum class AttachmentStoreOp
  3298. {
  3299. eStore = VK_ATTACHMENT_STORE_OP_STORE,
  3300. eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE,
  3301. eNoneQCOM = VK_ATTACHMENT_STORE_OP_NONE_QCOM};
  3302. VULKAN_HPP_INLINE std::string to_string( AttachmentStoreOp value )
  3303. {
  3304. switch ( value )
  3305. {
  3306. case AttachmentStoreOp::eStore : return "Store";
  3307. case AttachmentStoreOp::eDontCare : return "DontCare";
  3308. case AttachmentStoreOp::eNoneQCOM : return "NoneQCOM";
  3309. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3310. }
  3311. }
  3312. enum class BlendFactor
  3313. {
  3314. eZero = VK_BLEND_FACTOR_ZERO,
  3315. eOne = VK_BLEND_FACTOR_ONE,
  3316. eSrcColor = VK_BLEND_FACTOR_SRC_COLOR,
  3317. eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
  3318. eDstColor = VK_BLEND_FACTOR_DST_COLOR,
  3319. eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
  3320. eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA,
  3321. eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
  3322. eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA,
  3323. eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
  3324. eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR,
  3325. eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
  3326. eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA,
  3327. eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
  3328. eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE,
  3329. eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR,
  3330. eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR,
  3331. eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA,
  3332. eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA};
  3333. VULKAN_HPP_INLINE std::string to_string( BlendFactor value )
  3334. {
  3335. switch ( value )
  3336. {
  3337. case BlendFactor::eZero : return "Zero";
  3338. case BlendFactor::eOne : return "One";
  3339. case BlendFactor::eSrcColor : return "SrcColor";
  3340. case BlendFactor::eOneMinusSrcColor : return "OneMinusSrcColor";
  3341. case BlendFactor::eDstColor : return "DstColor";
  3342. case BlendFactor::eOneMinusDstColor : return "OneMinusDstColor";
  3343. case BlendFactor::eSrcAlpha : return "SrcAlpha";
  3344. case BlendFactor::eOneMinusSrcAlpha : return "OneMinusSrcAlpha";
  3345. case BlendFactor::eDstAlpha : return "DstAlpha";
  3346. case BlendFactor::eOneMinusDstAlpha : return "OneMinusDstAlpha";
  3347. case BlendFactor::eConstantColor : return "ConstantColor";
  3348. case BlendFactor::eOneMinusConstantColor : return "OneMinusConstantColor";
  3349. case BlendFactor::eConstantAlpha : return "ConstantAlpha";
  3350. case BlendFactor::eOneMinusConstantAlpha : return "OneMinusConstantAlpha";
  3351. case BlendFactor::eSrcAlphaSaturate : return "SrcAlphaSaturate";
  3352. case BlendFactor::eSrc1Color : return "Src1Color";
  3353. case BlendFactor::eOneMinusSrc1Color : return "OneMinusSrc1Color";
  3354. case BlendFactor::eSrc1Alpha : return "Src1Alpha";
  3355. case BlendFactor::eOneMinusSrc1Alpha : return "OneMinusSrc1Alpha";
  3356. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3357. }
  3358. }
  3359. enum class BlendOp
  3360. {
  3361. eAdd = VK_BLEND_OP_ADD,
  3362. eSubtract = VK_BLEND_OP_SUBTRACT,
  3363. eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT,
  3364. eMin = VK_BLEND_OP_MIN,
  3365. eMax = VK_BLEND_OP_MAX,
  3366. eZeroEXT = VK_BLEND_OP_ZERO_EXT,
  3367. eSrcEXT = VK_BLEND_OP_SRC_EXT,
  3368. eDstEXT = VK_BLEND_OP_DST_EXT,
  3369. eSrcOverEXT = VK_BLEND_OP_SRC_OVER_EXT,
  3370. eDstOverEXT = VK_BLEND_OP_DST_OVER_EXT,
  3371. eSrcInEXT = VK_BLEND_OP_SRC_IN_EXT,
  3372. eDstInEXT = VK_BLEND_OP_DST_IN_EXT,
  3373. eSrcOutEXT = VK_BLEND_OP_SRC_OUT_EXT,
  3374. eDstOutEXT = VK_BLEND_OP_DST_OUT_EXT,
  3375. eSrcAtopEXT = VK_BLEND_OP_SRC_ATOP_EXT,
  3376. eDstAtopEXT = VK_BLEND_OP_DST_ATOP_EXT,
  3377. eXorEXT = VK_BLEND_OP_XOR_EXT,
  3378. eMultiplyEXT = VK_BLEND_OP_MULTIPLY_EXT,
  3379. eScreenEXT = VK_BLEND_OP_SCREEN_EXT,
  3380. eOverlayEXT = VK_BLEND_OP_OVERLAY_EXT,
  3381. eDarkenEXT = VK_BLEND_OP_DARKEN_EXT,
  3382. eLightenEXT = VK_BLEND_OP_LIGHTEN_EXT,
  3383. eColordodgeEXT = VK_BLEND_OP_COLORDODGE_EXT,
  3384. eColorburnEXT = VK_BLEND_OP_COLORBURN_EXT,
  3385. eHardlightEXT = VK_BLEND_OP_HARDLIGHT_EXT,
  3386. eSoftlightEXT = VK_BLEND_OP_SOFTLIGHT_EXT,
  3387. eDifferenceEXT = VK_BLEND_OP_DIFFERENCE_EXT,
  3388. eExclusionEXT = VK_BLEND_OP_EXCLUSION_EXT,
  3389. eInvertEXT = VK_BLEND_OP_INVERT_EXT,
  3390. eInvertRgbEXT = VK_BLEND_OP_INVERT_RGB_EXT,
  3391. eLineardodgeEXT = VK_BLEND_OP_LINEARDODGE_EXT,
  3392. eLinearburnEXT = VK_BLEND_OP_LINEARBURN_EXT,
  3393. eVividlightEXT = VK_BLEND_OP_VIVIDLIGHT_EXT,
  3394. eLinearlightEXT = VK_BLEND_OP_LINEARLIGHT_EXT,
  3395. ePinlightEXT = VK_BLEND_OP_PINLIGHT_EXT,
  3396. eHardmixEXT = VK_BLEND_OP_HARDMIX_EXT,
  3397. eHslHueEXT = VK_BLEND_OP_HSL_HUE_EXT,
  3398. eHslSaturationEXT = VK_BLEND_OP_HSL_SATURATION_EXT,
  3399. eHslColorEXT = VK_BLEND_OP_HSL_COLOR_EXT,
  3400. eHslLuminosityEXT = VK_BLEND_OP_HSL_LUMINOSITY_EXT,
  3401. ePlusEXT = VK_BLEND_OP_PLUS_EXT,
  3402. ePlusClampedEXT = VK_BLEND_OP_PLUS_CLAMPED_EXT,
  3403. ePlusClampedAlphaEXT = VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT,
  3404. ePlusDarkerEXT = VK_BLEND_OP_PLUS_DARKER_EXT,
  3405. eMinusEXT = VK_BLEND_OP_MINUS_EXT,
  3406. eMinusClampedEXT = VK_BLEND_OP_MINUS_CLAMPED_EXT,
  3407. eContrastEXT = VK_BLEND_OP_CONTRAST_EXT,
  3408. eInvertOvgEXT = VK_BLEND_OP_INVERT_OVG_EXT,
  3409. eRedEXT = VK_BLEND_OP_RED_EXT,
  3410. eGreenEXT = VK_BLEND_OP_GREEN_EXT,
  3411. eBlueEXT = VK_BLEND_OP_BLUE_EXT};
  3412. VULKAN_HPP_INLINE std::string to_string( BlendOp value )
  3413. {
  3414. switch ( value )
  3415. {
  3416. case BlendOp::eAdd : return "Add";
  3417. case BlendOp::eSubtract : return "Subtract";
  3418. case BlendOp::eReverseSubtract : return "ReverseSubtract";
  3419. case BlendOp::eMin : return "Min";
  3420. case BlendOp::eMax : return "Max";
  3421. case BlendOp::eZeroEXT : return "ZeroEXT";
  3422. case BlendOp::eSrcEXT : return "SrcEXT";
  3423. case BlendOp::eDstEXT : return "DstEXT";
  3424. case BlendOp::eSrcOverEXT : return "SrcOverEXT";
  3425. case BlendOp::eDstOverEXT : return "DstOverEXT";
  3426. case BlendOp::eSrcInEXT : return "SrcInEXT";
  3427. case BlendOp::eDstInEXT : return "DstInEXT";
  3428. case BlendOp::eSrcOutEXT : return "SrcOutEXT";
  3429. case BlendOp::eDstOutEXT : return "DstOutEXT";
  3430. case BlendOp::eSrcAtopEXT : return "SrcAtopEXT";
  3431. case BlendOp::eDstAtopEXT : return "DstAtopEXT";
  3432. case BlendOp::eXorEXT : return "XorEXT";
  3433. case BlendOp::eMultiplyEXT : return "MultiplyEXT";
  3434. case BlendOp::eScreenEXT : return "ScreenEXT";
  3435. case BlendOp::eOverlayEXT : return "OverlayEXT";
  3436. case BlendOp::eDarkenEXT : return "DarkenEXT";
  3437. case BlendOp::eLightenEXT : return "LightenEXT";
  3438. case BlendOp::eColordodgeEXT : return "ColordodgeEXT";
  3439. case BlendOp::eColorburnEXT : return "ColorburnEXT";
  3440. case BlendOp::eHardlightEXT : return "HardlightEXT";
  3441. case BlendOp::eSoftlightEXT : return "SoftlightEXT";
  3442. case BlendOp::eDifferenceEXT : return "DifferenceEXT";
  3443. case BlendOp::eExclusionEXT : return "ExclusionEXT";
  3444. case BlendOp::eInvertEXT : return "InvertEXT";
  3445. case BlendOp::eInvertRgbEXT : return "InvertRgbEXT";
  3446. case BlendOp::eLineardodgeEXT : return "LineardodgeEXT";
  3447. case BlendOp::eLinearburnEXT : return "LinearburnEXT";
  3448. case BlendOp::eVividlightEXT : return "VividlightEXT";
  3449. case BlendOp::eLinearlightEXT : return "LinearlightEXT";
  3450. case BlendOp::ePinlightEXT : return "PinlightEXT";
  3451. case BlendOp::eHardmixEXT : return "HardmixEXT";
  3452. case BlendOp::eHslHueEXT : return "HslHueEXT";
  3453. case BlendOp::eHslSaturationEXT : return "HslSaturationEXT";
  3454. case BlendOp::eHslColorEXT : return "HslColorEXT";
  3455. case BlendOp::eHslLuminosityEXT : return "HslLuminosityEXT";
  3456. case BlendOp::ePlusEXT : return "PlusEXT";
  3457. case BlendOp::ePlusClampedEXT : return "PlusClampedEXT";
  3458. case BlendOp::ePlusClampedAlphaEXT : return "PlusClampedAlphaEXT";
  3459. case BlendOp::ePlusDarkerEXT : return "PlusDarkerEXT";
  3460. case BlendOp::eMinusEXT : return "MinusEXT";
  3461. case BlendOp::eMinusClampedEXT : return "MinusClampedEXT";
  3462. case BlendOp::eContrastEXT : return "ContrastEXT";
  3463. case BlendOp::eInvertOvgEXT : return "InvertOvgEXT";
  3464. case BlendOp::eRedEXT : return "RedEXT";
  3465. case BlendOp::eGreenEXT : return "GreenEXT";
  3466. case BlendOp::eBlueEXT : return "BlueEXT";
  3467. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3468. }
  3469. }
  3470. enum class BlendOverlapEXT
  3471. {
  3472. eUncorrelated = VK_BLEND_OVERLAP_UNCORRELATED_EXT,
  3473. eDisjoint = VK_BLEND_OVERLAP_DISJOINT_EXT,
  3474. eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT};
  3475. VULKAN_HPP_INLINE std::string to_string( BlendOverlapEXT value )
  3476. {
  3477. switch ( value )
  3478. {
  3479. case BlendOverlapEXT::eUncorrelated : return "Uncorrelated";
  3480. case BlendOverlapEXT::eDisjoint : return "Disjoint";
  3481. case BlendOverlapEXT::eConjoint : return "Conjoint";
  3482. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3483. }
  3484. }
  3485. enum class BorderColor
  3486. {
  3487. eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
  3488. eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
  3489. eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
  3490. eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
  3491. eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
  3492. eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE,
  3493. eFloatCustomEXT = VK_BORDER_COLOR_FLOAT_CUSTOM_EXT,
  3494. eIntCustomEXT = VK_BORDER_COLOR_INT_CUSTOM_EXT};
  3495. VULKAN_HPP_INLINE std::string to_string( BorderColor value )
  3496. {
  3497. switch ( value )
  3498. {
  3499. case BorderColor::eFloatTransparentBlack : return "FloatTransparentBlack";
  3500. case BorderColor::eIntTransparentBlack : return "IntTransparentBlack";
  3501. case BorderColor::eFloatOpaqueBlack : return "FloatOpaqueBlack";
  3502. case BorderColor::eIntOpaqueBlack : return "IntOpaqueBlack";
  3503. case BorderColor::eFloatOpaqueWhite : return "FloatOpaqueWhite";
  3504. case BorderColor::eIntOpaqueWhite : return "IntOpaqueWhite";
  3505. case BorderColor::eFloatCustomEXT : return "FloatCustomEXT";
  3506. case BorderColor::eIntCustomEXT : return "IntCustomEXT";
  3507. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3508. }
  3509. }
  3510. enum class BufferCreateFlagBits : VkBufferCreateFlags
  3511. {
  3512. eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
  3513. eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
  3514. eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT,
  3515. eProtected = VK_BUFFER_CREATE_PROTECTED_BIT,
  3516. eDeviceAddressCaptureReplay = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT,
  3517. eDeviceAddressCaptureReplayEXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT,
  3518. eDeviceAddressCaptureReplayKHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR};
  3519. VULKAN_HPP_INLINE std::string to_string( BufferCreateFlagBits value )
  3520. {
  3521. switch ( value )
  3522. {
  3523. case BufferCreateFlagBits::eSparseBinding : return "SparseBinding";
  3524. case BufferCreateFlagBits::eSparseResidency : return "SparseResidency";
  3525. case BufferCreateFlagBits::eSparseAliased : return "SparseAliased";
  3526. case BufferCreateFlagBits::eProtected : return "Protected";
  3527. case BufferCreateFlagBits::eDeviceAddressCaptureReplay : return "DeviceAddressCaptureReplay";
  3528. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3529. }
  3530. }
  3531. enum class BufferUsageFlagBits : VkBufferUsageFlags
  3532. {
  3533. eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
  3534. eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
  3535. eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
  3536. eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
  3537. eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
  3538. eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
  3539. eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
  3540. eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
  3541. eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT,
  3542. eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
  3543. eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT,
  3544. eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT,
  3545. eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT,
  3546. eAccelerationStructureBuildInputReadOnlyKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR,
  3547. eAccelerationStructureStorageKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR,
  3548. eShaderBindingTableKHR = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR,
  3549. eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV,
  3550. eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT,
  3551. eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR};
  3552. VULKAN_HPP_INLINE std::string to_string( BufferUsageFlagBits value )
  3553. {
  3554. switch ( value )
  3555. {
  3556. case BufferUsageFlagBits::eTransferSrc : return "TransferSrc";
  3557. case BufferUsageFlagBits::eTransferDst : return "TransferDst";
  3558. case BufferUsageFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer";
  3559. case BufferUsageFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer";
  3560. case BufferUsageFlagBits::eUniformBuffer : return "UniformBuffer";
  3561. case BufferUsageFlagBits::eStorageBuffer : return "StorageBuffer";
  3562. case BufferUsageFlagBits::eIndexBuffer : return "IndexBuffer";
  3563. case BufferUsageFlagBits::eVertexBuffer : return "VertexBuffer";
  3564. case BufferUsageFlagBits::eIndirectBuffer : return "IndirectBuffer";
  3565. case BufferUsageFlagBits::eShaderDeviceAddress : return "ShaderDeviceAddress";
  3566. case BufferUsageFlagBits::eTransformFeedbackBufferEXT : return "TransformFeedbackBufferEXT";
  3567. case BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT : return "TransformFeedbackCounterBufferEXT";
  3568. case BufferUsageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT";
  3569. case BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR : return "AccelerationStructureBuildInputReadOnlyKHR";
  3570. case BufferUsageFlagBits::eAccelerationStructureStorageKHR : return "AccelerationStructureStorageKHR";
  3571. case BufferUsageFlagBits::eShaderBindingTableKHR : return "ShaderBindingTableKHR";
  3572. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3573. }
  3574. }
  3575. enum class BuildAccelerationStructureFlagBitsKHR : VkBuildAccelerationStructureFlagsKHR
  3576. {
  3577. eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR,
  3578. eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR,
  3579. ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR,
  3580. ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR,
  3581. eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR};
  3582. using BuildAccelerationStructureFlagBitsNV = BuildAccelerationStructureFlagBitsKHR;
  3583. VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagBitsKHR value )
  3584. {
  3585. switch ( value )
  3586. {
  3587. case BuildAccelerationStructureFlagBitsKHR::eAllowUpdate : return "AllowUpdate";
  3588. case BuildAccelerationStructureFlagBitsKHR::eAllowCompaction : return "AllowCompaction";
  3589. case BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace : return "PreferFastTrace";
  3590. case BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild : return "PreferFastBuild";
  3591. case BuildAccelerationStructureFlagBitsKHR::eLowMemory : return "LowMemory";
  3592. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3593. }
  3594. }
  3595. enum class BuildAccelerationStructureModeKHR
  3596. {
  3597. eBuild = VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR,
  3598. eUpdate = VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR};
  3599. VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureModeKHR value )
  3600. {
  3601. switch ( value )
  3602. {
  3603. case BuildAccelerationStructureModeKHR::eBuild : return "Build";
  3604. case BuildAccelerationStructureModeKHR::eUpdate : return "Update";
  3605. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3606. }
  3607. }
  3608. enum class ChromaLocation
  3609. {
  3610. eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN,
  3611. eMidpoint = VK_CHROMA_LOCATION_MIDPOINT};
  3612. using ChromaLocationKHR = ChromaLocation;
  3613. VULKAN_HPP_INLINE std::string to_string( ChromaLocation value )
  3614. {
  3615. switch ( value )
  3616. {
  3617. case ChromaLocation::eCositedEven : return "CositedEven";
  3618. case ChromaLocation::eMidpoint : return "Midpoint";
  3619. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3620. }
  3621. }
  3622. enum class CoarseSampleOrderTypeNV
  3623. {
  3624. eDefault = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV,
  3625. eCustom = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV,
  3626. ePixelMajor = VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV,
  3627. eSampleMajor = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV};
  3628. VULKAN_HPP_INLINE std::string to_string( CoarseSampleOrderTypeNV value )
  3629. {
  3630. switch ( value )
  3631. {
  3632. case CoarseSampleOrderTypeNV::eDefault : return "Default";
  3633. case CoarseSampleOrderTypeNV::eCustom : return "Custom";
  3634. case CoarseSampleOrderTypeNV::ePixelMajor : return "PixelMajor";
  3635. case CoarseSampleOrderTypeNV::eSampleMajor : return "SampleMajor";
  3636. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3637. }
  3638. }
  3639. enum class ColorComponentFlagBits : VkColorComponentFlags
  3640. {
  3641. eR = VK_COLOR_COMPONENT_R_BIT,
  3642. eG = VK_COLOR_COMPONENT_G_BIT,
  3643. eB = VK_COLOR_COMPONENT_B_BIT,
  3644. eA = VK_COLOR_COMPONENT_A_BIT};
  3645. VULKAN_HPP_INLINE std::string to_string( ColorComponentFlagBits value )
  3646. {
  3647. switch ( value )
  3648. {
  3649. case ColorComponentFlagBits::eR : return "R";
  3650. case ColorComponentFlagBits::eG : return "G";
  3651. case ColorComponentFlagBits::eB : return "B";
  3652. case ColorComponentFlagBits::eA : return "A";
  3653. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3654. }
  3655. }
  3656. enum class ColorSpaceKHR
  3657. {
  3658. eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
  3659. eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT,
  3660. eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT,
  3661. eDisplayP3LinearEXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT,
  3662. eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT,
  3663. eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT,
  3664. eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT,
  3665. eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT,
  3666. eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT,
  3667. eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT,
  3668. eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT,
  3669. eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT,
  3670. eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT,
  3671. ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT,
  3672. eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT,
  3673. eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD,
  3674. eVkColorspaceSrgbNonlinear = VK_COLORSPACE_SRGB_NONLINEAR_KHR,
  3675. eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT};
  3676. VULKAN_HPP_INLINE std::string to_string( ColorSpaceKHR value )
  3677. {
  3678. switch ( value )
  3679. {
  3680. case ColorSpaceKHR::eSrgbNonlinear : return "SrgbNonlinear";
  3681. case ColorSpaceKHR::eDisplayP3NonlinearEXT : return "DisplayP3NonlinearEXT";
  3682. case ColorSpaceKHR::eExtendedSrgbLinearEXT : return "ExtendedSrgbLinearEXT";
  3683. case ColorSpaceKHR::eDisplayP3LinearEXT : return "DisplayP3LinearEXT";
  3684. case ColorSpaceKHR::eDciP3NonlinearEXT : return "DciP3NonlinearEXT";
  3685. case ColorSpaceKHR::eBt709LinearEXT : return "Bt709LinearEXT";
  3686. case ColorSpaceKHR::eBt709NonlinearEXT : return "Bt709NonlinearEXT";
  3687. case ColorSpaceKHR::eBt2020LinearEXT : return "Bt2020LinearEXT";
  3688. case ColorSpaceKHR::eHdr10St2084EXT : return "Hdr10St2084EXT";
  3689. case ColorSpaceKHR::eDolbyvisionEXT : return "DolbyvisionEXT";
  3690. case ColorSpaceKHR::eHdr10HlgEXT : return "Hdr10HlgEXT";
  3691. case ColorSpaceKHR::eAdobergbLinearEXT : return "AdobergbLinearEXT";
  3692. case ColorSpaceKHR::eAdobergbNonlinearEXT : return "AdobergbNonlinearEXT";
  3693. case ColorSpaceKHR::ePassThroughEXT : return "PassThroughEXT";
  3694. case ColorSpaceKHR::eExtendedSrgbNonlinearEXT : return "ExtendedSrgbNonlinearEXT";
  3695. case ColorSpaceKHR::eDisplayNativeAMD : return "DisplayNativeAMD";
  3696. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3697. }
  3698. }
  3699. enum class CommandBufferLevel
  3700. {
  3701. ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
  3702. eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY};
  3703. VULKAN_HPP_INLINE std::string to_string( CommandBufferLevel value )
  3704. {
  3705. switch ( value )
  3706. {
  3707. case CommandBufferLevel::ePrimary : return "Primary";
  3708. case CommandBufferLevel::eSecondary : return "Secondary";
  3709. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3710. }
  3711. }
  3712. enum class CommandBufferResetFlagBits : VkCommandBufferResetFlags
  3713. {
  3714. eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT};
  3715. VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlagBits value )
  3716. {
  3717. switch ( value )
  3718. {
  3719. case CommandBufferResetFlagBits::eReleaseResources : return "ReleaseResources";
  3720. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3721. }
  3722. }
  3723. enum class CommandBufferUsageFlagBits : VkCommandBufferUsageFlags
  3724. {
  3725. eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
  3726. eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
  3727. eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT};
  3728. VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlagBits value )
  3729. {
  3730. switch ( value )
  3731. {
  3732. case CommandBufferUsageFlagBits::eOneTimeSubmit : return "OneTimeSubmit";
  3733. case CommandBufferUsageFlagBits::eRenderPassContinue : return "RenderPassContinue";
  3734. case CommandBufferUsageFlagBits::eSimultaneousUse : return "SimultaneousUse";
  3735. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3736. }
  3737. }
  3738. enum class CommandPoolCreateFlagBits : VkCommandPoolCreateFlags
  3739. {
  3740. eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,
  3741. eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
  3742. eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT};
  3743. VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlagBits value )
  3744. {
  3745. switch ( value )
  3746. {
  3747. case CommandPoolCreateFlagBits::eTransient : return "Transient";
  3748. case CommandPoolCreateFlagBits::eResetCommandBuffer : return "ResetCommandBuffer";
  3749. case CommandPoolCreateFlagBits::eProtected : return "Protected";
  3750. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3751. }
  3752. }
  3753. enum class CommandPoolResetFlagBits : VkCommandPoolResetFlags
  3754. {
  3755. eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT};
  3756. VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlagBits value )
  3757. {
  3758. switch ( value )
  3759. {
  3760. case CommandPoolResetFlagBits::eReleaseResources : return "ReleaseResources";
  3761. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3762. }
  3763. }
  3764. enum class CompareOp
  3765. {
  3766. eNever = VK_COMPARE_OP_NEVER,
  3767. eLess = VK_COMPARE_OP_LESS,
  3768. eEqual = VK_COMPARE_OP_EQUAL,
  3769. eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL,
  3770. eGreater = VK_COMPARE_OP_GREATER,
  3771. eNotEqual = VK_COMPARE_OP_NOT_EQUAL,
  3772. eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL,
  3773. eAlways = VK_COMPARE_OP_ALWAYS};
  3774. VULKAN_HPP_INLINE std::string to_string( CompareOp value )
  3775. {
  3776. switch ( value )
  3777. {
  3778. case CompareOp::eNever : return "Never";
  3779. case CompareOp::eLess : return "Less";
  3780. case CompareOp::eEqual : return "Equal";
  3781. case CompareOp::eLessOrEqual : return "LessOrEqual";
  3782. case CompareOp::eGreater : return "Greater";
  3783. case CompareOp::eNotEqual : return "NotEqual";
  3784. case CompareOp::eGreaterOrEqual : return "GreaterOrEqual";
  3785. case CompareOp::eAlways : return "Always";
  3786. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3787. }
  3788. }
  3789. enum class ComponentSwizzle
  3790. {
  3791. eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY,
  3792. eZero = VK_COMPONENT_SWIZZLE_ZERO,
  3793. eOne = VK_COMPONENT_SWIZZLE_ONE,
  3794. eR = VK_COMPONENT_SWIZZLE_R,
  3795. eG = VK_COMPONENT_SWIZZLE_G,
  3796. eB = VK_COMPONENT_SWIZZLE_B,
  3797. eA = VK_COMPONENT_SWIZZLE_A};
  3798. VULKAN_HPP_INLINE std::string to_string( ComponentSwizzle value )
  3799. {
  3800. switch ( value )
  3801. {
  3802. case ComponentSwizzle::eIdentity : return "Identity";
  3803. case ComponentSwizzle::eZero : return "Zero";
  3804. case ComponentSwizzle::eOne : return "One";
  3805. case ComponentSwizzle::eR : return "R";
  3806. case ComponentSwizzle::eG : return "G";
  3807. case ComponentSwizzle::eB : return "B";
  3808. case ComponentSwizzle::eA : return "A";
  3809. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3810. }
  3811. }
  3812. enum class ComponentTypeNV
  3813. {
  3814. eFloat16 = VK_COMPONENT_TYPE_FLOAT16_NV,
  3815. eFloat32 = VK_COMPONENT_TYPE_FLOAT32_NV,
  3816. eFloat64 = VK_COMPONENT_TYPE_FLOAT64_NV,
  3817. eSint8 = VK_COMPONENT_TYPE_SINT8_NV,
  3818. eSint16 = VK_COMPONENT_TYPE_SINT16_NV,
  3819. eSint32 = VK_COMPONENT_TYPE_SINT32_NV,
  3820. eSint64 = VK_COMPONENT_TYPE_SINT64_NV,
  3821. eUint8 = VK_COMPONENT_TYPE_UINT8_NV,
  3822. eUint16 = VK_COMPONENT_TYPE_UINT16_NV,
  3823. eUint32 = VK_COMPONENT_TYPE_UINT32_NV,
  3824. eUint64 = VK_COMPONENT_TYPE_UINT64_NV};
  3825. VULKAN_HPP_INLINE std::string to_string( ComponentTypeNV value )
  3826. {
  3827. switch ( value )
  3828. {
  3829. case ComponentTypeNV::eFloat16 : return "Float16";
  3830. case ComponentTypeNV::eFloat32 : return "Float32";
  3831. case ComponentTypeNV::eFloat64 : return "Float64";
  3832. case ComponentTypeNV::eSint8 : return "Sint8";
  3833. case ComponentTypeNV::eSint16 : return "Sint16";
  3834. case ComponentTypeNV::eSint32 : return "Sint32";
  3835. case ComponentTypeNV::eSint64 : return "Sint64";
  3836. case ComponentTypeNV::eUint8 : return "Uint8";
  3837. case ComponentTypeNV::eUint16 : return "Uint16";
  3838. case ComponentTypeNV::eUint32 : return "Uint32";
  3839. case ComponentTypeNV::eUint64 : return "Uint64";
  3840. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3841. }
  3842. }
  3843. enum class CompositeAlphaFlagBitsKHR : VkCompositeAlphaFlagsKHR
  3844. {
  3845. eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
  3846. ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
  3847. ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
  3848. eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR};
  3849. VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagBitsKHR value )
  3850. {
  3851. switch ( value )
  3852. {
  3853. case CompositeAlphaFlagBitsKHR::eOpaque : return "Opaque";
  3854. case CompositeAlphaFlagBitsKHR::ePreMultiplied : return "PreMultiplied";
  3855. case CompositeAlphaFlagBitsKHR::ePostMultiplied : return "PostMultiplied";
  3856. case CompositeAlphaFlagBitsKHR::eInherit : return "Inherit";
  3857. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3858. }
  3859. }
  3860. enum class ConditionalRenderingFlagBitsEXT : VkConditionalRenderingFlagsEXT
  3861. {
  3862. eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT};
  3863. VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagBitsEXT value )
  3864. {
  3865. switch ( value )
  3866. {
  3867. case ConditionalRenderingFlagBitsEXT::eInverted : return "Inverted";
  3868. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3869. }
  3870. }
  3871. enum class ConservativeRasterizationModeEXT
  3872. {
  3873. eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT,
  3874. eOverestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT,
  3875. eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT};
  3876. VULKAN_HPP_INLINE std::string to_string( ConservativeRasterizationModeEXT value )
  3877. {
  3878. switch ( value )
  3879. {
  3880. case ConservativeRasterizationModeEXT::eDisabled : return "Disabled";
  3881. case ConservativeRasterizationModeEXT::eOverestimate : return "Overestimate";
  3882. case ConservativeRasterizationModeEXT::eUnderestimate : return "Underestimate";
  3883. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3884. }
  3885. }
  3886. enum class CopyAccelerationStructureModeKHR
  3887. {
  3888. eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR,
  3889. eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR,
  3890. eSerialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR,
  3891. eDeserialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR};
  3892. using CopyAccelerationStructureModeNV = CopyAccelerationStructureModeKHR;
  3893. VULKAN_HPP_INLINE std::string to_string( CopyAccelerationStructureModeKHR value )
  3894. {
  3895. switch ( value )
  3896. {
  3897. case CopyAccelerationStructureModeKHR::eClone : return "Clone";
  3898. case CopyAccelerationStructureModeKHR::eCompact : return "Compact";
  3899. case CopyAccelerationStructureModeKHR::eSerialize : return "Serialize";
  3900. case CopyAccelerationStructureModeKHR::eDeserialize : return "Deserialize";
  3901. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3902. }
  3903. }
  3904. enum class CoverageModulationModeNV
  3905. {
  3906. eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV,
  3907. eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV,
  3908. eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV,
  3909. eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV};
  3910. VULKAN_HPP_INLINE std::string to_string( CoverageModulationModeNV value )
  3911. {
  3912. switch ( value )
  3913. {
  3914. case CoverageModulationModeNV::eNone : return "None";
  3915. case CoverageModulationModeNV::eRgb : return "Rgb";
  3916. case CoverageModulationModeNV::eAlpha : return "Alpha";
  3917. case CoverageModulationModeNV::eRgba : return "Rgba";
  3918. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3919. }
  3920. }
  3921. enum class CoverageReductionModeNV
  3922. {
  3923. eMerge = VK_COVERAGE_REDUCTION_MODE_MERGE_NV,
  3924. eTruncate = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV};
  3925. VULKAN_HPP_INLINE std::string to_string( CoverageReductionModeNV value )
  3926. {
  3927. switch ( value )
  3928. {
  3929. case CoverageReductionModeNV::eMerge : return "Merge";
  3930. case CoverageReductionModeNV::eTruncate : return "Truncate";
  3931. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3932. }
  3933. }
  3934. enum class CullModeFlagBits : VkCullModeFlags
  3935. {
  3936. eNone = VK_CULL_MODE_NONE,
  3937. eFront = VK_CULL_MODE_FRONT_BIT,
  3938. eBack = VK_CULL_MODE_BACK_BIT,
  3939. eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK};
  3940. VULKAN_HPP_INLINE std::string to_string( CullModeFlagBits value )
  3941. {
  3942. switch ( value )
  3943. {
  3944. case CullModeFlagBits::eNone : return "None";
  3945. case CullModeFlagBits::eFront : return "Front";
  3946. case CullModeFlagBits::eBack : return "Back";
  3947. case CullModeFlagBits::eFrontAndBack : return "FrontAndBack";
  3948. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  3949. }
  3950. }
  enum class DebugReportFlagBitsEXT : VkDebugReportFlagsEXT
  {
    eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
    eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
    ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
    eError = VK_DEBUG_REPORT_ERROR_BIT_EXT,
    eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DebugReportFlagBitsEXT value )
  {
    switch ( value )
    {
      case DebugReportFlagBitsEXT::eInformation: return "Information";
      case DebugReportFlagBitsEXT::eWarning: return "Warning";
      case DebugReportFlagBitsEXT::ePerformanceWarning: return "PerformanceWarning";
      case DebugReportFlagBitsEXT::eError: return "Error";
      case DebugReportFlagBitsEXT::eDebug: return "Debug";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
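  // Usage sketch (illustrative comment only): individual FlagBits values combine into the
  // corresponding Flags type via operator|, e.g. when filling a
  // vk::DebugReportCallbackCreateInfoEXT (the callback function is a hypothetical name):
  //   vk::DebugReportCallbackCreateInfoEXT reportInfo;
  //   reportInfo.flags       = vk::DebugReportFlagBitsEXT::eWarning |
  //                            vk::DebugReportFlagBitsEXT::ePerformanceWarning |
  //                            vk::DebugReportFlagBitsEXT::eError;
  //   reportInfo.pfnCallback = &myDebugReportCallback;   // hypothetical callback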
  enum class DebugReportObjectTypeEXT
  {
    eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
    eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
    ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
    eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
    eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
    eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
    eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
    eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
    eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
    eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
    eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
    eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
    eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
    eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,
    eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
    eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
    ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,
    ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
    eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
    ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
    eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
    eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
    eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
    eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
    eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,
    eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
    eSurfaceKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,
    eSwapchainKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
    eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,
    eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT,
    eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT,
    eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
    eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT,
    eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT,
    eAccelerationStructureKHR = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT,
    eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
    eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT,
    eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT,
    eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT,
    eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DebugReportObjectTypeEXT value )
  {
    switch ( value )
    {
      case DebugReportObjectTypeEXT::eUnknown: return "Unknown";
      case DebugReportObjectTypeEXT::eInstance: return "Instance";
      case DebugReportObjectTypeEXT::ePhysicalDevice: return "PhysicalDevice";
      case DebugReportObjectTypeEXT::eDevice: return "Device";
      case DebugReportObjectTypeEXT::eQueue: return "Queue";
      case DebugReportObjectTypeEXT::eSemaphore: return "Semaphore";
      case DebugReportObjectTypeEXT::eCommandBuffer: return "CommandBuffer";
      case DebugReportObjectTypeEXT::eFence: return "Fence";
      case DebugReportObjectTypeEXT::eDeviceMemory: return "DeviceMemory";
      case DebugReportObjectTypeEXT::eBuffer: return "Buffer";
      case DebugReportObjectTypeEXT::eImage: return "Image";
      case DebugReportObjectTypeEXT::eEvent: return "Event";
      case DebugReportObjectTypeEXT::eQueryPool: return "QueryPool";
      case DebugReportObjectTypeEXT::eBufferView: return "BufferView";
      case DebugReportObjectTypeEXT::eImageView: return "ImageView";
      case DebugReportObjectTypeEXT::eShaderModule: return "ShaderModule";
      case DebugReportObjectTypeEXT::ePipelineCache: return "PipelineCache";
      case DebugReportObjectTypeEXT::ePipelineLayout: return "PipelineLayout";
      case DebugReportObjectTypeEXT::eRenderPass: return "RenderPass";
      case DebugReportObjectTypeEXT::ePipeline: return "Pipeline";
      case DebugReportObjectTypeEXT::eDescriptorSetLayout: return "DescriptorSetLayout";
      case DebugReportObjectTypeEXT::eSampler: return "Sampler";
      case DebugReportObjectTypeEXT::eDescriptorPool: return "DescriptorPool";
      case DebugReportObjectTypeEXT::eDescriptorSet: return "DescriptorSet";
      case DebugReportObjectTypeEXT::eFramebuffer: return "Framebuffer";
      case DebugReportObjectTypeEXT::eCommandPool: return "CommandPool";
      case DebugReportObjectTypeEXT::eSurfaceKHR: return "SurfaceKHR";
      case DebugReportObjectTypeEXT::eSwapchainKHR: return "SwapchainKHR";
      case DebugReportObjectTypeEXT::eDebugReportCallbackEXT: return "DebugReportCallbackEXT";
      case DebugReportObjectTypeEXT::eDisplayKHR: return "DisplayKHR";
      case DebugReportObjectTypeEXT::eDisplayModeKHR: return "DisplayModeKHR";
      case DebugReportObjectTypeEXT::eValidationCacheEXT: return "ValidationCacheEXT";
      case DebugReportObjectTypeEXT::eSamplerYcbcrConversion: return "SamplerYcbcrConversion";
      case DebugReportObjectTypeEXT::eDescriptorUpdateTemplate: return "DescriptorUpdateTemplate";
      case DebugReportObjectTypeEXT::eAccelerationStructureKHR: return "AccelerationStructureKHR";
      case DebugReportObjectTypeEXT::eAccelerationStructureNV: return "AccelerationStructureNV";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class DebugUtilsMessageSeverityFlagBitsEXT : VkDebugUtilsMessageSeverityFlagsEXT
  {
    eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT,
    eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT,
    eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT,
    eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagBitsEXT value )
  {
    switch ( value )
    {
      case DebugUtilsMessageSeverityFlagBitsEXT::eVerbose: return "Verbose";
      case DebugUtilsMessageSeverityFlagBitsEXT::eInfo: return "Info";
      case DebugUtilsMessageSeverityFlagBitsEXT::eWarning: return "Warning";
      case DebugUtilsMessageSeverityFlagBitsEXT::eError: return "Error";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class DebugUtilsMessageTypeFlagBitsEXT : VkDebugUtilsMessageTypeFlagsEXT
  {
    eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT,
    eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
    ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagBitsEXT value )
  {
    switch ( value )
    {
      case DebugUtilsMessageTypeFlagBitsEXT::eGeneral: return "General";
      case DebugUtilsMessageTypeFlagBitsEXT::eValidation: return "Validation";
      case DebugUtilsMessageTypeFlagBitsEXT::ePerformance: return "Performance";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
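  // Usage sketch (illustrative comment only; `instance` is assumed to be a valid vk::Instance
  // created with VK_EXT_debug_utils enabled and with the extension entry points loaded):
  //   vk::DebugUtilsMessengerCreateInfoEXT messengerInfo;
  //   messengerInfo.messageSeverity = vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
  //                                   vk::DebugUtilsMessageSeverityFlagBitsEXT::eError;
  //   messengerInfo.messageType     = vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral |
  //                                   vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation |
  //                                   vk::DebugUtilsMessageTypeFlagBitsEXT::ePerformance;
  //   messengerInfo.pfnUserCallback = &myDebugCallback;   // hypothetical callback
  //   auto messenger = instance.createDebugUtilsMessengerEXT( messengerInfo );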
  enum class DependencyFlagBits : VkDependencyFlags
  {
    eByRegion = VK_DEPENDENCY_BY_REGION_BIT,
    eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT,
    eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT,
    eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR,
    eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( DependencyFlagBits value )
  {
    switch ( value )
    {
      case DependencyFlagBits::eByRegion: return "ByRegion";
      case DependencyFlagBits::eDeviceGroup: return "DeviceGroup";
      case DependencyFlagBits::eViewLocal: return "ViewLocal";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class DescriptorBindingFlagBits : VkDescriptorBindingFlags
  {
    eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,
    eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT,
    ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT,
    eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT
  };
  using DescriptorBindingFlagBitsEXT = DescriptorBindingFlagBits;

  VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlagBits value )
  {
    switch ( value )
    {
      case DescriptorBindingFlagBits::eUpdateAfterBind: return "UpdateAfterBind";
      case DescriptorBindingFlagBits::eUpdateUnusedWhilePending: return "UpdateUnusedWhilePending";
      case DescriptorBindingFlagBits::ePartiallyBound: return "PartiallyBound";
      case DescriptorBindingFlagBits::eVariableDescriptorCount: return "VariableDescriptorCount";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
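  // Usage sketch (illustrative comment only): per-binding flags are supplied through a
  // vk::DescriptorSetLayoutBindingFlagsCreateInfo chained into the set-layout create info
  // (`layoutInfo` is an assumed vk::DescriptorSetLayoutCreateInfo set up elsewhere):
  //   std::array<vk::DescriptorBindingFlags, 1> bindingFlags = {
  //     vk::DescriptorBindingFlagBits::ePartiallyBound |
  //     vk::DescriptorBindingFlagBits::eUpdateAfterBind };
  //   vk::DescriptorSetLayoutBindingFlagsCreateInfo flagsInfo;
  //   flagsInfo.bindingCount  = static_cast<uint32_t>( bindingFlags.size() );
  //   flagsInfo.pBindingFlags = bindingFlags.data();
  //   layoutInfo.pNext        = &flagsInfo;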
  enum class DescriptorPoolCreateFlagBits : VkDescriptorPoolCreateFlags
  {
    eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
    eUpdateAfterBind = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT,
    eHostOnlyVALVE = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE,
    eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlagBits value )
  {
    switch ( value )
    {
      case DescriptorPoolCreateFlagBits::eFreeDescriptorSet: return "FreeDescriptorSet";
      case DescriptorPoolCreateFlagBits::eUpdateAfterBind: return "UpdateAfterBind";
      case DescriptorPoolCreateFlagBits::eHostOnlyVALVE: return "HostOnlyVALVE";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class DescriptorSetLayoutCreateFlagBits : VkDescriptorSetLayoutCreateFlags
  {
    eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
    ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
    eHostOnlyPoolVALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE,
    eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlagBits value )
  {
    switch ( value )
    {
      case DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool: return "UpdateAfterBindPool";
      case DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR: return "PushDescriptorKHR";
      case DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolVALVE: return "HostOnlyPoolVALVE";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class DescriptorType
  {
    eSampler = VK_DESCRIPTOR_TYPE_SAMPLER,
    eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
    eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
    eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
    eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
    eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
    eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
    eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
    eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
    eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
    eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
    eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT,
    eAccelerationStructureKHR = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
    eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV,
    eMutableVALVE = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE
  };

  VULKAN_HPP_INLINE std::string to_string( DescriptorType value )
  {
    switch ( value )
    {
      case DescriptorType::eSampler: return "Sampler";
      case DescriptorType::eCombinedImageSampler: return "CombinedImageSampler";
      case DescriptorType::eSampledImage: return "SampledImage";
      case DescriptorType::eStorageImage: return "StorageImage";
      case DescriptorType::eUniformTexelBuffer: return "UniformTexelBuffer";
      case DescriptorType::eStorageTexelBuffer: return "StorageTexelBuffer";
      case DescriptorType::eUniformBuffer: return "UniformBuffer";
      case DescriptorType::eStorageBuffer: return "StorageBuffer";
      case DescriptorType::eUniformBufferDynamic: return "UniformBufferDynamic";
      case DescriptorType::eStorageBufferDynamic: return "StorageBufferDynamic";
      case DescriptorType::eInputAttachment: return "InputAttachment";
      case DescriptorType::eInlineUniformBlockEXT: return "InlineUniformBlockEXT";
      case DescriptorType::eAccelerationStructureKHR: return "AccelerationStructureKHR";
      case DescriptorType::eAccelerationStructureNV: return "AccelerationStructureNV";
      case DescriptorType::eMutableVALVE: return "MutableVALVE";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
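  // Usage sketch (illustrative comment only): DescriptorType is used, for example, to size a
  // descriptor pool, and the to_string() overload above is handy when logging it:
  //   vk::DescriptorPoolSize poolSize( vk::DescriptorType::eUniformBuffer, 16 );
  //   std::cout << "pool holds " << poolSize.descriptorCount << " x "
  //             << vk::to_string( poolSize.type ) << " descriptors\n";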
  enum class DescriptorUpdateTemplateType
  {
    eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
    ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR
  };
  using DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType;

  VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateType value )
  {
    switch ( value )
    {
      case DescriptorUpdateTemplateType::eDescriptorSet: return "DescriptorSet";
      case DescriptorUpdateTemplateType::ePushDescriptorsKHR: return "PushDescriptorsKHR";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class DeviceCreateFlagBits
  {
  };

  VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlagBits )
  {
    return "(void)";
  }
  enum class DeviceDiagnosticsConfigFlagBitsNV : VkDeviceDiagnosticsConfigFlagsNV
  {
    eEnableShaderDebugInfo = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV,
    eEnableResourceTracking = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV,
    eEnableAutomaticCheckpoints = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV
  };

  VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagBitsNV value )
  {
    switch ( value )
    {
      case DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo: return "EnableShaderDebugInfo";
      case DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking: return "EnableResourceTracking";
      case DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints: return "EnableAutomaticCheckpoints";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class DeviceEventTypeEXT
  {
    eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DeviceEventTypeEXT value )
  {
    switch ( value )
    {
      case DeviceEventTypeEXT::eDisplayHotplug: return "DisplayHotplug";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class DeviceGroupPresentModeFlagBitsKHR : VkDeviceGroupPresentModeFlagsKHR
  {
    eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR,
    eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR,
    eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR,
    eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagBitsKHR value )
  {
    switch ( value )
    {
      case DeviceGroupPresentModeFlagBitsKHR::eLocal: return "Local";
      case DeviceGroupPresentModeFlagBitsKHR::eRemote: return "Remote";
      case DeviceGroupPresentModeFlagBitsKHR::eSum: return "Sum";
      case DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice: return "LocalMultiDevice";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class DeviceMemoryReportEventTypeEXT
  {
    eAllocate = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT,
    eFree = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT,
    eImport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT,
    eUnimport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT,
    eAllocationFailed = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportEventTypeEXT value )
  {
    switch ( value )
    {
      case DeviceMemoryReportEventTypeEXT::eAllocate: return "Allocate";
      case DeviceMemoryReportEventTypeEXT::eFree: return "Free";
      case DeviceMemoryReportEventTypeEXT::eImport: return "Import";
      case DeviceMemoryReportEventTypeEXT::eUnimport: return "Unimport";
      case DeviceMemoryReportEventTypeEXT::eAllocationFailed: return "AllocationFailed";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class DeviceQueueCreateFlagBits : VkDeviceQueueCreateFlags
  {
    eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
  };

  VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlagBits value )
  {
    switch ( value )
    {
      case DeviceQueueCreateFlagBits::eProtected: return "Protected";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class DiscardRectangleModeEXT
  {
    eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT,
    eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DiscardRectangleModeEXT value )
  {
    switch ( value )
    {
      case DiscardRectangleModeEXT::eInclusive: return "Inclusive";
      case DiscardRectangleModeEXT::eExclusive: return "Exclusive";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class DisplayEventTypeEXT
  {
    eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DisplayEventTypeEXT value )
  {
    switch ( value )
    {
      case DisplayEventTypeEXT::eFirstPixelOut: return "FirstPixelOut";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class DisplayPlaneAlphaFlagBitsKHR : VkDisplayPlaneAlphaFlagsKHR
  {
    eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
    eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
    ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
    ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
  };

  VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagBitsKHR value )
  {
    switch ( value )
    {
      case DisplayPlaneAlphaFlagBitsKHR::eOpaque: return "Opaque";
      case DisplayPlaneAlphaFlagBitsKHR::eGlobal: return "Global";
      case DisplayPlaneAlphaFlagBitsKHR::ePerPixel: return "PerPixel";
      case DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied: return "PerPixelPremultiplied";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class DisplayPowerStateEXT
  {
    eOff = VK_DISPLAY_POWER_STATE_OFF_EXT,
    eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT,
    eOn = VK_DISPLAY_POWER_STATE_ON_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DisplayPowerStateEXT value )
  {
    switch ( value )
    {
      case DisplayPowerStateEXT::eOff: return "Off";
      case DisplayPowerStateEXT::eSuspend: return "Suspend";
      case DisplayPowerStateEXT::eOn: return "On";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class DriverId
  {
    eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY,
    eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE,
    eMesaRadv = VK_DRIVER_ID_MESA_RADV,
    eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY,
    eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS,
    eIntelOpenSourceMESA = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA,
    eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY,
    eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY,
    eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY,
    eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER,
    eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY,
    eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY,
    eMesaLlvmpipe = VK_DRIVER_ID_MESA_LLVMPIPE,
    eMoltenvk = VK_DRIVER_ID_MOLTENVK,
    eIntelOpenSourceMesa = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR
  };
  using DriverIdKHR = DriverId;

  VULKAN_HPP_INLINE std::string to_string( DriverId value )
  {
    switch ( value )
    {
      case DriverId::eAmdProprietary: return "AmdProprietary";
      case DriverId::eAmdOpenSource: return "AmdOpenSource";
      case DriverId::eMesaRadv: return "MesaRadv";
      case DriverId::eNvidiaProprietary: return "NvidiaProprietary";
      case DriverId::eIntelProprietaryWindows: return "IntelProprietaryWindows";
      case DriverId::eIntelOpenSourceMESA: return "IntelOpenSourceMESA";
      case DriverId::eImaginationProprietary: return "ImaginationProprietary";
      case DriverId::eQualcommProprietary: return "QualcommProprietary";
      case DriverId::eArmProprietary: return "ArmProprietary";
      case DriverId::eGoogleSwiftshader: return "GoogleSwiftshader";
      case DriverId::eGgpProprietary: return "GgpProprietary";
      case DriverId::eBroadcomProprietary: return "BroadcomProprietary";
      case DriverId::eMesaLlvmpipe: return "MesaLlvmpipe";
      case DriverId::eMoltenvk: return "Moltenvk";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
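  // Usage sketch (illustrative comment only; `physicalDevice` is assumed to be a valid
  // vk::PhysicalDevice on a Vulkan 1.2 implementation):
  //   auto chain    = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
  //                                                 vk::PhysicalDeviceDriverProperties>();
  //   auto driverId = chain.get<vk::PhysicalDeviceDriverProperties>().driverID;
  //   std::cout << "driver: " << vk::to_string( driverId ) << "\n";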
  enum class DynamicState
  {
    eViewport = VK_DYNAMIC_STATE_VIEWPORT,
    eScissor = VK_DYNAMIC_STATE_SCISSOR,
    eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
    eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
    eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
    eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
    eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
    eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
    eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE,
    eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV,
    eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT,
    eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT,
    eRayTracingPipelineStackSizeKHR = VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR,
    eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV,
    eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV,
    eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV,
    eFragmentShadingRateKHR = VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR,
    eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT,
    eCullModeEXT = VK_DYNAMIC_STATE_CULL_MODE_EXT,
    eFrontFaceEXT = VK_DYNAMIC_STATE_FRONT_FACE_EXT,
    ePrimitiveTopologyEXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT,
    eViewportWithCountEXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT,
    eScissorWithCountEXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT,
    eVertexInputBindingStrideEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT,
    eDepthTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT,
    eDepthWriteEnableEXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT,
    eDepthCompareOpEXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT,
    eDepthBoundsTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT,
    eStencilTestEnableEXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT,
    eStencilOpEXT = VK_DYNAMIC_STATE_STENCIL_OP_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( DynamicState value )
  {
    switch ( value )
    {
      case DynamicState::eViewport: return "Viewport";
      case DynamicState::eScissor: return "Scissor";
      case DynamicState::eLineWidth: return "LineWidth";
      case DynamicState::eDepthBias: return "DepthBias";
      case DynamicState::eBlendConstants: return "BlendConstants";
      case DynamicState::eDepthBounds: return "DepthBounds";
      case DynamicState::eStencilCompareMask: return "StencilCompareMask";
      case DynamicState::eStencilWriteMask: return "StencilWriteMask";
      case DynamicState::eStencilReference: return "StencilReference";
      case DynamicState::eViewportWScalingNV: return "ViewportWScalingNV";
      case DynamicState::eDiscardRectangleEXT: return "DiscardRectangleEXT";
      case DynamicState::eSampleLocationsEXT: return "SampleLocationsEXT";
      case DynamicState::eRayTracingPipelineStackSizeKHR: return "RayTracingPipelineStackSizeKHR";
      case DynamicState::eViewportShadingRatePaletteNV: return "ViewportShadingRatePaletteNV";
      case DynamicState::eViewportCoarseSampleOrderNV: return "ViewportCoarseSampleOrderNV";
      case DynamicState::eExclusiveScissorNV: return "ExclusiveScissorNV";
      case DynamicState::eFragmentShadingRateKHR: return "FragmentShadingRateKHR";
      case DynamicState::eLineStippleEXT: return "LineStippleEXT";
      case DynamicState::eCullModeEXT: return "CullModeEXT";
      case DynamicState::eFrontFaceEXT: return "FrontFaceEXT";
      case DynamicState::ePrimitiveTopologyEXT: return "PrimitiveTopologyEXT";
      case DynamicState::eViewportWithCountEXT: return "ViewportWithCountEXT";
      case DynamicState::eScissorWithCountEXT: return "ScissorWithCountEXT";
      case DynamicState::eVertexInputBindingStrideEXT: return "VertexInputBindingStrideEXT";
      case DynamicState::eDepthTestEnableEXT: return "DepthTestEnableEXT";
      case DynamicState::eDepthWriteEnableEXT: return "DepthWriteEnableEXT";
      case DynamicState::eDepthCompareOpEXT: return "DepthCompareOpEXT";
      case DynamicState::eDepthBoundsTestEnableEXT: return "DepthBoundsTestEnableEXT";
      case DynamicState::eStencilTestEnableEXT: return "StencilTestEnableEXT";
      case DynamicState::eStencilOpEXT: return "StencilOpEXT";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
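  // Usage sketch (illustrative comment only): dynamic states are listed in a
  // vk::PipelineDynamicStateCreateInfo when building a graphics pipeline:
  //   std::array<vk::DynamicState, 2> dynamicStates = { vk::DynamicState::eViewport,
  //                                                     vk::DynamicState::eScissor };
  //   vk::PipelineDynamicStateCreateInfo dynamicInfo;
  //   dynamicInfo.dynamicStateCount = static_cast<uint32_t>( dynamicStates.size() );
  //   dynamicInfo.pDynamicStates    = dynamicStates.data();
  //   // ... then point vk::GraphicsPipelineCreateInfo::pDynamicState at &dynamicInfo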
  enum class ExternalFenceFeatureFlagBits : VkExternalFenceFeatureFlags
  {
    eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
    eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT
  };
  using ExternalFenceFeatureFlagBitsKHR = ExternalFenceFeatureFlagBits;

  VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlagBits value )
  {
    switch ( value )
    {
      case ExternalFenceFeatureFlagBits::eExportable: return "Exportable";
      case ExternalFenceFeatureFlagBits::eImportable: return "Importable";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class ExternalFenceHandleTypeFlagBits : VkExternalFenceHandleTypeFlags
  {
    eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
    eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
    eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
    eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT
  };
  using ExternalFenceHandleTypeFlagBitsKHR = ExternalFenceHandleTypeFlagBits;

  VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlagBits value )
  {
    switch ( value )
    {
      case ExternalFenceHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
      case ExternalFenceHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
      case ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
      case ExternalFenceHandleTypeFlagBits::eSyncFd: return "SyncFd";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class ExternalMemoryFeatureFlagBits : VkExternalMemoryFeatureFlags
  {
    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,
    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT,
    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT
  };
  using ExternalMemoryFeatureFlagBitsKHR = ExternalMemoryFeatureFlagBits;

  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBits value )
  {
    switch ( value )
    {
      case ExternalMemoryFeatureFlagBits::eDedicatedOnly: return "DedicatedOnly";
      case ExternalMemoryFeatureFlagBits::eExportable: return "Exportable";
      case ExternalMemoryFeatureFlagBits::eImportable: return "Importable";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class ExternalMemoryFeatureFlagBitsNV : VkExternalMemoryFeatureFlagsNV
  {
    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
  };

  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagBitsNV value )
  {
    switch ( value )
    {
      case ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly: return "DedicatedOnly";
      case ExternalMemoryFeatureFlagBitsNV::eExportable: return "Exportable";
      case ExternalMemoryFeatureFlagBitsNV::eImportable: return "Importable";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class ExternalMemoryHandleTypeFlagBits : VkExternalMemoryHandleTypeFlags
  {
    eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
    eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
    eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT,
    eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT,
    eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT,
    eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT,
    eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
    eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
    eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT
  };
  using ExternalMemoryHandleTypeFlagBitsKHR = ExternalMemoryHandleTypeFlagBits;

  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBits value )
  {
    switch ( value )
    {
      case ExternalMemoryHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
      case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
      case ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
      case ExternalMemoryHandleTypeFlagBits::eD3D11Texture: return "D3D11Texture";
      case ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt: return "D3D11TextureKmt";
      case ExternalMemoryHandleTypeFlagBits::eD3D12Heap: return "D3D12Heap";
      case ExternalMemoryHandleTypeFlagBits::eD3D12Resource: return "D3D12Resource";
      case ExternalMemoryHandleTypeFlagBits::eDmaBufEXT: return "DmaBufEXT";
#ifdef VK_USE_PLATFORM_ANDROID_KHR
      case ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID: return "AndroidHardwareBufferANDROID";
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
      case ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT: return "HostAllocationEXT";
      case ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT: return "HostMappedForeignMemoryEXT";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
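  // Usage sketch (illustrative comment only): the external handle type is declared at allocation
  // time by chaining a vk::ExportMemoryAllocateInfo; `allocationSize`, `memoryTypeIndex` and
  // `device` are assumed to be set up elsewhere:
  //   vk::ExportMemoryAllocateInfo exportInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   vk::MemoryAllocateInfo       allocInfo( allocationSize, memoryTypeIndex );
  //   allocInfo.pNext = &exportInfo;
  //   vk::DeviceMemory memory = device.allocateMemory( allocInfo );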
  enum class ExternalMemoryHandleTypeFlagBitsNV : VkExternalMemoryHandleTypeFlagsNV
  {
    eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
    eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
    eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
  };

  VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagBitsNV value )
  {
    switch ( value )
    {
      case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32: return "OpaqueWin32";
      case ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
      case ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image: return "D3D11Image";
      case ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt: return "D3D11ImageKmt";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class ExternalSemaphoreFeatureFlagBits : VkExternalSemaphoreFeatureFlags
  {
    eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,
    eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT
  };
  using ExternalSemaphoreFeatureFlagBitsKHR = ExternalSemaphoreFeatureFlagBits;

  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlagBits value )
  {
    switch ( value )
    {
      case ExternalSemaphoreFeatureFlagBits::eExportable: return "Exportable";
      case ExternalSemaphoreFeatureFlagBits::eImportable: return "Importable";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class ExternalSemaphoreHandleTypeFlagBits : VkExternalSemaphoreHandleTypeFlags
  {
    eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
    eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
    eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
    eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
    eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
    eD3D11Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT
  };
  using ExternalSemaphoreHandleTypeFlagBitsKHR = ExternalSemaphoreHandleTypeFlagBits;

  VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlagBits value )
  {
    switch ( value )
    {
      case ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd: return "OpaqueFd";
      case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32: return "OpaqueWin32";
      case ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt: return "OpaqueWin32Kmt";
      case ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence: return "D3D12Fence";
      case ExternalSemaphoreHandleTypeFlagBits::eSyncFd: return "SyncFd";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class FenceCreateFlagBits : VkFenceCreateFlags
  {
    eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
  };

  VULKAN_HPP_INLINE std::string to_string( FenceCreateFlagBits value )
  {
    switch ( value )
    {
      case FenceCreateFlagBits::eSignaled: return "Signaled";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
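  // Usage sketch (illustrative comment only; `device` is assumed to be a valid vk::Device):
  // creating a fence that starts out in the signaled state.
  //   vk::FenceCreateInfo fenceInfo( vk::FenceCreateFlagBits::eSignaled );
  //   vk::Fence           fence = device.createFence( fenceInfo );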
  enum class FenceImportFlagBits : VkFenceImportFlags
  {
    eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT
  };
  using FenceImportFlagBitsKHR = FenceImportFlagBits;

  VULKAN_HPP_INLINE std::string to_string( FenceImportFlagBits value )
  {
    switch ( value )
    {
      case FenceImportFlagBits::eTemporary: return "Temporary";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
  enum class Filter
  {
    eNearest = VK_FILTER_NEAREST,
    eLinear = VK_FILTER_LINEAR,
    eCubicIMG = VK_FILTER_CUBIC_IMG,
    eCubicEXT = VK_FILTER_CUBIC_EXT
  };

  VULKAN_HPP_INLINE std::string to_string( Filter value )
  {
    switch ( value )
    {
      case Filter::eNearest: return "Nearest";
      case Filter::eLinear: return "Linear";
      case Filter::eCubicIMG: return "CubicIMG";
      default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
    }
  }
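  // Usage sketch (illustrative comment only; `device` is assumed to be a valid vk::Device):
  // Filter selects how a sampler interpolates between texels.
  //   vk::SamplerCreateInfo samplerInfo;
  //   samplerInfo.magFilter = vk::Filter::eLinear;
  //   samplerInfo.minFilter = vk::Filter::eLinear;
  //   vk::Sampler sampler   = device.createSampler( samplerInfo );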
  enum class Format
  {
    eUndefined = VK_FORMAT_UNDEFINED,
    eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8,
    eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16,
    eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16,
    eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16,
    eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16,
    eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16,
    eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16,
    eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16,
    eR8Unorm = VK_FORMAT_R8_UNORM,
    eR8Snorm = VK_FORMAT_R8_SNORM,
    eR8Uscaled = VK_FORMAT_R8_USCALED,
    eR8Sscaled = VK_FORMAT_R8_SSCALED,
    eR8Uint = VK_FORMAT_R8_UINT,
    eR8Sint = VK_FORMAT_R8_SINT,
    eR8Srgb = VK_FORMAT_R8_SRGB,
    eR8G8Unorm = VK_FORMAT_R8G8_UNORM,
    eR8G8Snorm = VK_FORMAT_R8G8_SNORM,
    eR8G8Uscaled = VK_FORMAT_R8G8_USCALED,
    eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED,
    eR8G8Uint = VK_FORMAT_R8G8_UINT,
    eR8G8Sint = VK_FORMAT_R8G8_SINT,
    eR8G8Srgb = VK_FORMAT_R8G8_SRGB,
    eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM,
    eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM,
    eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED,
    eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED,
    eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT,
    eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT,
    eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB,
    eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM,
    eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM,
    eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED,
    eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED,
    eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT,
    eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT,
    eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB,
    eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM,
    eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM,
    eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED,
    eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED,
    eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT,
    eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT,
    eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB,
    eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM,
    eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM,
    eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED,
    eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED,
    eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT,
    eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT,
    eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB,
    eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32,
    eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32,
    eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32,
    eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32,
    eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32,
    eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32,
    eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32,
    eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32,
    eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32,
    eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32,
    eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32,
    eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32,
    eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32,
    eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32,
    eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32,
    eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32,
    eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32,
    eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32,
    eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32,
    eR16Unorm = VK_FORMAT_R16_UNORM,
    eR16Snorm = VK_FORMAT_R16_SNORM,
    eR16Uscaled = VK_FORMAT_R16_USCALED,
    eR16Sscaled = VK_FORMAT_R16_SSCALED,
    eR16Uint = VK_FORMAT_R16_UINT,
    eR16Sint = VK_FORMAT_R16_SINT,
    eR16Sfloat = VK_FORMAT_R16_SFLOAT,
    eR16G16Unorm = VK_FORMAT_R16G16_UNORM,
    eR16G16Snorm = VK_FORMAT_R16G16_SNORM,
    eR16G16Uscaled = VK_FORMAT_R16G16_USCALED,
    eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED,
    eR16G16Uint = VK_FORMAT_R16G16_UINT,
    eR16G16Sint = VK_FORMAT_R16G16_SINT,
    eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT,
    eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM,
    eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM,
    eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED,
    eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED,
    eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT,
    eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT,
    eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT,
    eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM,
    eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM,
    eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED,
    eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED,
    eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT,
    eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT,
    eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT,
    eR32Uint = VK_FORMAT_R32_UINT,
    eR32Sint = VK_FORMAT_R32_SINT,
    eR32Sfloat = VK_FORMAT_R32_SFLOAT,
    eR32G32Uint = VK_FORMAT_R32G32_UINT,
    eR32G32Sint = VK_FORMAT_R32G32_SINT,
    eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT,
    eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT,
    eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT,
    eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT,
    eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT,
    eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT,
    eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT,
    eR64Uint = VK_FORMAT_R64_UINT,
    eR64Sint = VK_FORMAT_R64_SINT,
    eR64Sfloat = VK_FORMAT_R64_SFLOAT,
    eR64G64Uint = VK_FORMAT_R64G64_UINT,
    eR64G64Sint = VK_FORMAT_R64G64_SINT,
    eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT,
    eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT,
    eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT,
    eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT,
    eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT,
    eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT,
    eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT,
    eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32,
    eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
    eD16Unorm = VK_FORMAT_D16_UNORM,
    eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32,
    eD32Sfloat = VK_FORMAT_D32_SFLOAT,
    eS8Uint = VK_FORMAT_S8_UINT,
    eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT,
    eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT,
    eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT,
    eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK,
    eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK,
    eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
    eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
    eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK,
    eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK,
    eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK,
    eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK,
    eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK,
    eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK,
    eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK,
    eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK,
    eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK,
    eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK,
    eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK,
    eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK,
    eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
    eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
    eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
    eEtc2R8G8B8A1SrgbBlock = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
    eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
    eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
    eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK,
    eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK,
    eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
    eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
    eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
    eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
    eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
    eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
    eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
    eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
    eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
    eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
    eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
    eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
    eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
    eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
    eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
    eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
    eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
    eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
    eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
    eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
    eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
    eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
    eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
    eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
    eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
    eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
    eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
    eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
    eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
    eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
    eG8B8G8R8422Unorm = VK_FORMAT_G8B8G8R8_422_UNORM,
    eB8G8R8G8422Unorm = VK_FORMAT_B8G8R8G8_422_UNORM,
    eG8B8R83Plane420Unorm = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
    eG8B8R82Plane420Unorm = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
    eG8B8R83Plane422Unorm = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
    eG8B8R82Plane422Unorm = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
    eG8B8R83Plane444Unorm = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
    eR10X6UnormPack16 = VK_FORMAT_R10X6_UNORM_PACK16,
    eR10X6G10X6Unorm2Pack16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
    eR10X6G10X6B10X6A10X6Unorm4Pack16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
    eG10X6B10X6G10X6R10X6422Unorm4Pack16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
    eB10X6G10X6R10X6G10X6422Unorm4Pack16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
    eG10X6B10X6R10X63Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
    eG10X6B10X6R10X62Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
    eG10X6B10X6R10X63Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
    eG10X6B10X6R10X62Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
    eG10X6B10X6R10X63Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
    eR12X4UnormPack16 = VK_FORMAT_R12X4_UNORM_PACK16,
    eR12X4G12X4Unorm2Pack16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
    eR12X4G12X4B12X4A12X4Unorm4Pack16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
    eG12X4B12X4G12X4R12X4422Unorm4Pack16 = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
    eB12X4G12X4R12X4G12X4422Unorm4Pack16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
    eG12X4B12X4R12X43Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
    eG12X4B12X4R12X42Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
    eG12X4B12X4R12X43Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
    eG12X4B12X4R12X42Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
    eG12X4B12X4R12X43Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
    eG16B16G16R16422Unorm = VK_FORMAT_G16B16G16R16_422_UNORM,
    eB16G16R16G16422Unorm = VK_FORMAT_B16G16R16G16_422_UNORM,
    eG16B16R163Plane420Unorm = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
    eG16B16R162Plane420Unorm = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
    eG16B16R163Plane422Unorm = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
    eG16B16R162Plane422Unorm = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
    eG16B16R163Plane444Unorm = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
    ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG,
    ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG,
    ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG,
    ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG,
    ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG,
    ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
    ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
    ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG,
    eAstc4x4SfloatBlockEXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT,
    eAstc5x4SfloatBlockEXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT,
    eAstc5x5SfloatBlockEXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT,
    eAstc6x5SfloatBlockEXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT,
    eAstc6x6SfloatBlockEXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT,
    eAstc8x5SfloatBlockEXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT,
    eAstc8x6SfloatBlockEXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT,
    eAstc8x8SfloatBlockEXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT,
    eAstc10x5SfloatBlockEXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT,
    eAstc10x6SfloatBlockEXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT,
    eAstc10x8SfloatBlockEXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT,
    eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT,
    eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT,
    eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT,
    eA4R4G4B4UnormPack16EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT,
    eA4B4G4R4UnormPack16EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT,
    eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR,
    eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR,
    eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR,
    eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR,
    eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR,
    eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR,
    eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR,
    eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR,
    eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR,
    eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR,
    eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR,
    eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR,
    eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR,
    eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR,
    eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR,
    eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR,
    eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM_KHR,
    eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR,
    eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR,
    eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR,
    eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR,
    eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR,
    eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR,
    eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR,
    eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR,
    eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR,
    eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR,
    eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR,
    eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR,
    eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR,
    eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR,
    eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR,
    eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR,
    eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR
  };
  4919. VULKAN_HPP_INLINE std::string to_string( Format value )
  4920. {
  4921. switch ( value )
  4922. {
  4923. case Format::eUndefined : return "Undefined";
  4924. case Format::eR4G4UnormPack8 : return "R4G4UnormPack8";
  4925. case Format::eR4G4B4A4UnormPack16 : return "R4G4B4A4UnormPack16";
  4926. case Format::eB4G4R4A4UnormPack16 : return "B4G4R4A4UnormPack16";
  4927. case Format::eR5G6B5UnormPack16 : return "R5G6B5UnormPack16";
  4928. case Format::eB5G6R5UnormPack16 : return "B5G6R5UnormPack16";
  4929. case Format::eR5G5B5A1UnormPack16 : return "R5G5B5A1UnormPack16";
  4930. case Format::eB5G5R5A1UnormPack16 : return "B5G5R5A1UnormPack16";
  4931. case Format::eA1R5G5B5UnormPack16 : return "A1R5G5B5UnormPack16";
  4932. case Format::eR8Unorm : return "R8Unorm";
  4933. case Format::eR8Snorm : return "R8Snorm";
  4934. case Format::eR8Uscaled : return "R8Uscaled";
  4935. case Format::eR8Sscaled : return "R8Sscaled";
  4936. case Format::eR8Uint : return "R8Uint";
  4937. case Format::eR8Sint : return "R8Sint";
  4938. case Format::eR8Srgb : return "R8Srgb";
  4939. case Format::eR8G8Unorm : return "R8G8Unorm";
  4940. case Format::eR8G8Snorm : return "R8G8Snorm";
  4941. case Format::eR8G8Uscaled : return "R8G8Uscaled";
  4942. case Format::eR8G8Sscaled : return "R8G8Sscaled";
  4943. case Format::eR8G8Uint : return "R8G8Uint";
  4944. case Format::eR8G8Sint : return "R8G8Sint";
  4945. case Format::eR8G8Srgb : return "R8G8Srgb";
  4946. case Format::eR8G8B8Unorm : return "R8G8B8Unorm";
  4947. case Format::eR8G8B8Snorm : return "R8G8B8Snorm";
  4948. case Format::eR8G8B8Uscaled : return "R8G8B8Uscaled";
  4949. case Format::eR8G8B8Sscaled : return "R8G8B8Sscaled";
  4950. case Format::eR8G8B8Uint : return "R8G8B8Uint";
  4951. case Format::eR8G8B8Sint : return "R8G8B8Sint";
  4952. case Format::eR8G8B8Srgb : return "R8G8B8Srgb";
  4953. case Format::eB8G8R8Unorm : return "B8G8R8Unorm";
  4954. case Format::eB8G8R8Snorm : return "B8G8R8Snorm";
  4955. case Format::eB8G8R8Uscaled : return "B8G8R8Uscaled";
  4956. case Format::eB8G8R8Sscaled : return "B8G8R8Sscaled";
  4957. case Format::eB8G8R8Uint : return "B8G8R8Uint";
  4958. case Format::eB8G8R8Sint : return "B8G8R8Sint";
  4959. case Format::eB8G8R8Srgb : return "B8G8R8Srgb";
  4960. case Format::eR8G8B8A8Unorm : return "R8G8B8A8Unorm";
  4961. case Format::eR8G8B8A8Snorm : return "R8G8B8A8Snorm";
  4962. case Format::eR8G8B8A8Uscaled : return "R8G8B8A8Uscaled";
  4963. case Format::eR8G8B8A8Sscaled : return "R8G8B8A8Sscaled";
  4964. case Format::eR8G8B8A8Uint : return "R8G8B8A8Uint";
  4965. case Format::eR8G8B8A8Sint : return "R8G8B8A8Sint";
  4966. case Format::eR8G8B8A8Srgb : return "R8G8B8A8Srgb";
  4967. case Format::eB8G8R8A8Unorm : return "B8G8R8A8Unorm";
  4968. case Format::eB8G8R8A8Snorm : return "B8G8R8A8Snorm";
  4969. case Format::eB8G8R8A8Uscaled : return "B8G8R8A8Uscaled";
  4970. case Format::eB8G8R8A8Sscaled : return "B8G8R8A8Sscaled";
  4971. case Format::eB8G8R8A8Uint : return "B8G8R8A8Uint";
  4972. case Format::eB8G8R8A8Sint : return "B8G8R8A8Sint";
  4973. case Format::eB8G8R8A8Srgb : return "B8G8R8A8Srgb";
  4974. case Format::eA8B8G8R8UnormPack32 : return "A8B8G8R8UnormPack32";
  4975. case Format::eA8B8G8R8SnormPack32 : return "A8B8G8R8SnormPack32";
  4976. case Format::eA8B8G8R8UscaledPack32 : return "A8B8G8R8UscaledPack32";
  4977. case Format::eA8B8G8R8SscaledPack32 : return "A8B8G8R8SscaledPack32";
  4978. case Format::eA8B8G8R8UintPack32 : return "A8B8G8R8UintPack32";
  4979. case Format::eA8B8G8R8SintPack32 : return "A8B8G8R8SintPack32";
  4980. case Format::eA8B8G8R8SrgbPack32 : return "A8B8G8R8SrgbPack32";
  4981. case Format::eA2R10G10B10UnormPack32 : return "A2R10G10B10UnormPack32";
  4982. case Format::eA2R10G10B10SnormPack32 : return "A2R10G10B10SnormPack32";
  4983. case Format::eA2R10G10B10UscaledPack32 : return "A2R10G10B10UscaledPack32";
  4984. case Format::eA2R10G10B10SscaledPack32 : return "A2R10G10B10SscaledPack32";
  4985. case Format::eA2R10G10B10UintPack32 : return "A2R10G10B10UintPack32";
  4986. case Format::eA2R10G10B10SintPack32 : return "A2R10G10B10SintPack32";
  4987. case Format::eA2B10G10R10UnormPack32 : return "A2B10G10R10UnormPack32";
  4988. case Format::eA2B10G10R10SnormPack32 : return "A2B10G10R10SnormPack32";
  4989. case Format::eA2B10G10R10UscaledPack32 : return "A2B10G10R10UscaledPack32";
  4990. case Format::eA2B10G10R10SscaledPack32 : return "A2B10G10R10SscaledPack32";
  4991. case Format::eA2B10G10R10UintPack32 : return "A2B10G10R10UintPack32";
  4992. case Format::eA2B10G10R10SintPack32 : return "A2B10G10R10SintPack32";
  4993. case Format::eR16Unorm : return "R16Unorm";
  4994. case Format::eR16Snorm : return "R16Snorm";
  4995. case Format::eR16Uscaled : return "R16Uscaled";
  4996. case Format::eR16Sscaled : return "R16Sscaled";
  4997. case Format::eR16Uint : return "R16Uint";
  4998. case Format::eR16Sint : return "R16Sint";
  4999. case Format::eR16Sfloat : return "R16Sfloat";
  5000. case Format::eR16G16Unorm : return "R16G16Unorm";
  5001. case Format::eR16G16Snorm : return "R16G16Snorm";
  5002. case Format::eR16G16Uscaled : return "R16G16Uscaled";
  5003. case Format::eR16G16Sscaled : return "R16G16Sscaled";
  5004. case Format::eR16G16Uint : return "R16G16Uint";
  5005. case Format::eR16G16Sint : return "R16G16Sint";
  5006. case Format::eR16G16Sfloat : return "R16G16Sfloat";
  5007. case Format::eR16G16B16Unorm : return "R16G16B16Unorm";
  5008. case Format::eR16G16B16Snorm : return "R16G16B16Snorm";
  5009. case Format::eR16G16B16Uscaled : return "R16G16B16Uscaled";
  5010. case Format::eR16G16B16Sscaled : return "R16G16B16Sscaled";
  5011. case Format::eR16G16B16Uint : return "R16G16B16Uint";
  5012. case Format::eR16G16B16Sint : return "R16G16B16Sint";
  5013. case Format::eR16G16B16Sfloat : return "R16G16B16Sfloat";
  5014. case Format::eR16G16B16A16Unorm : return "R16G16B16A16Unorm";
  5015. case Format::eR16G16B16A16Snorm : return "R16G16B16A16Snorm";
  5016. case Format::eR16G16B16A16Uscaled : return "R16G16B16A16Uscaled";
  5017. case Format::eR16G16B16A16Sscaled : return "R16G16B16A16Sscaled";
  5018. case Format::eR16G16B16A16Uint : return "R16G16B16A16Uint";
  5019. case Format::eR16G16B16A16Sint : return "R16G16B16A16Sint";
  5020. case Format::eR16G16B16A16Sfloat : return "R16G16B16A16Sfloat";
  5021. case Format::eR32Uint : return "R32Uint";
  5022. case Format::eR32Sint : return "R32Sint";
  5023. case Format::eR32Sfloat : return "R32Sfloat";
  5024. case Format::eR32G32Uint : return "R32G32Uint";
  5025. case Format::eR32G32Sint : return "R32G32Sint";
  5026. case Format::eR32G32Sfloat : return "R32G32Sfloat";
  5027. case Format::eR32G32B32Uint : return "R32G32B32Uint";
  5028. case Format::eR32G32B32Sint : return "R32G32B32Sint";
  5029. case Format::eR32G32B32Sfloat : return "R32G32B32Sfloat";
  5030. case Format::eR32G32B32A32Uint : return "R32G32B32A32Uint";
  5031. case Format::eR32G32B32A32Sint : return "R32G32B32A32Sint";
  5032. case Format::eR32G32B32A32Sfloat : return "R32G32B32A32Sfloat";
  5033. case Format::eR64Uint : return "R64Uint";
  5034. case Format::eR64Sint : return "R64Sint";
  5035. case Format::eR64Sfloat : return "R64Sfloat";
  5036. case Format::eR64G64Uint : return "R64G64Uint";
  5037. case Format::eR64G64Sint : return "R64G64Sint";
  5038. case Format::eR64G64Sfloat : return "R64G64Sfloat";
  5039. case Format::eR64G64B64Uint : return "R64G64B64Uint";
  5040. case Format::eR64G64B64Sint : return "R64G64B64Sint";
  5041. case Format::eR64G64B64Sfloat : return "R64G64B64Sfloat";
  5042. case Format::eR64G64B64A64Uint : return "R64G64B64A64Uint";
  5043. case Format::eR64G64B64A64Sint : return "R64G64B64A64Sint";
  5044. case Format::eR64G64B64A64Sfloat : return "R64G64B64A64Sfloat";
  5045. case Format::eB10G11R11UfloatPack32 : return "B10G11R11UfloatPack32";
  5046. case Format::eE5B9G9R9UfloatPack32 : return "E5B9G9R9UfloatPack32";
  5047. case Format::eD16Unorm : return "D16Unorm";
  5048. case Format::eX8D24UnormPack32 : return "X8D24UnormPack32";
  5049. case Format::eD32Sfloat : return "D32Sfloat";
  5050. case Format::eS8Uint : return "S8Uint";
  5051. case Format::eD16UnormS8Uint : return "D16UnormS8Uint";
  5052. case Format::eD24UnormS8Uint : return "D24UnormS8Uint";
  5053. case Format::eD32SfloatS8Uint : return "D32SfloatS8Uint";
  5054. case Format::eBc1RgbUnormBlock : return "Bc1RgbUnormBlock";
  5055. case Format::eBc1RgbSrgbBlock : return "Bc1RgbSrgbBlock";
  5056. case Format::eBc1RgbaUnormBlock : return "Bc1RgbaUnormBlock";
  5057. case Format::eBc1RgbaSrgbBlock : return "Bc1RgbaSrgbBlock";
  5058. case Format::eBc2UnormBlock : return "Bc2UnormBlock";
  5059. case Format::eBc2SrgbBlock : return "Bc2SrgbBlock";
  5060. case Format::eBc3UnormBlock : return "Bc3UnormBlock";
  5061. case Format::eBc3SrgbBlock : return "Bc3SrgbBlock";
  5062. case Format::eBc4UnormBlock : return "Bc4UnormBlock";
  5063. case Format::eBc4SnormBlock : return "Bc4SnormBlock";
  5064. case Format::eBc5UnormBlock : return "Bc5UnormBlock";
  5065. case Format::eBc5SnormBlock : return "Bc5SnormBlock";
  5066. case Format::eBc6HUfloatBlock : return "Bc6HUfloatBlock";
  5067. case Format::eBc6HSfloatBlock : return "Bc6HSfloatBlock";
  5068. case Format::eBc7UnormBlock : return "Bc7UnormBlock";
  5069. case Format::eBc7SrgbBlock : return "Bc7SrgbBlock";
  5070. case Format::eEtc2R8G8B8UnormBlock : return "Etc2R8G8B8UnormBlock";
  5071. case Format::eEtc2R8G8B8SrgbBlock : return "Etc2R8G8B8SrgbBlock";
  5072. case Format::eEtc2R8G8B8A1UnormBlock : return "Etc2R8G8B8A1UnormBlock";
  5073. case Format::eEtc2R8G8B8A1SrgbBlock : return "Etc2R8G8B8A1SrgbBlock";
  5074. case Format::eEtc2R8G8B8A8UnormBlock : return "Etc2R8G8B8A8UnormBlock";
  5075. case Format::eEtc2R8G8B8A8SrgbBlock : return "Etc2R8G8B8A8SrgbBlock";
  5076. case Format::eEacR11UnormBlock : return "EacR11UnormBlock";
  5077. case Format::eEacR11SnormBlock : return "EacR11SnormBlock";
  5078. case Format::eEacR11G11UnormBlock : return "EacR11G11UnormBlock";
  5079. case Format::eEacR11G11SnormBlock : return "EacR11G11SnormBlock";
  5080. case Format::eAstc4x4UnormBlock : return "Astc4x4UnormBlock";
  5081. case Format::eAstc4x4SrgbBlock : return "Astc4x4SrgbBlock";
  5082. case Format::eAstc5x4UnormBlock : return "Astc5x4UnormBlock";
  5083. case Format::eAstc5x4SrgbBlock : return "Astc5x4SrgbBlock";
  5084. case Format::eAstc5x5UnormBlock : return "Astc5x5UnormBlock";
  5085. case Format::eAstc5x5SrgbBlock : return "Astc5x5SrgbBlock";
  5086. case Format::eAstc6x5UnormBlock : return "Astc6x5UnormBlock";
  5087. case Format::eAstc6x5SrgbBlock : return "Astc6x5SrgbBlock";
  5088. case Format::eAstc6x6UnormBlock : return "Astc6x6UnormBlock";
  5089. case Format::eAstc6x6SrgbBlock : return "Astc6x6SrgbBlock";
  5090. case Format::eAstc8x5UnormBlock : return "Astc8x5UnormBlock";
  5091. case Format::eAstc8x5SrgbBlock : return "Astc8x5SrgbBlock";
  5092. case Format::eAstc8x6UnormBlock : return "Astc8x6UnormBlock";
  5093. case Format::eAstc8x6SrgbBlock : return "Astc8x6SrgbBlock";
  5094. case Format::eAstc8x8UnormBlock : return "Astc8x8UnormBlock";
  5095. case Format::eAstc8x8SrgbBlock : return "Astc8x8SrgbBlock";
  5096. case Format::eAstc10x5UnormBlock : return "Astc10x5UnormBlock";
  5097. case Format::eAstc10x5SrgbBlock : return "Astc10x5SrgbBlock";
  5098. case Format::eAstc10x6UnormBlock : return "Astc10x6UnormBlock";
  5099. case Format::eAstc10x6SrgbBlock : return "Astc10x6SrgbBlock";
  5100. case Format::eAstc10x8UnormBlock : return "Astc10x8UnormBlock";
  5101. case Format::eAstc10x8SrgbBlock : return "Astc10x8SrgbBlock";
  5102. case Format::eAstc10x10UnormBlock : return "Astc10x10UnormBlock";
  5103. case Format::eAstc10x10SrgbBlock : return "Astc10x10SrgbBlock";
  5104. case Format::eAstc12x10UnormBlock : return "Astc12x10UnormBlock";
  5105. case Format::eAstc12x10SrgbBlock : return "Astc12x10SrgbBlock";
  5106. case Format::eAstc12x12UnormBlock : return "Astc12x12UnormBlock";
  5107. case Format::eAstc12x12SrgbBlock : return "Astc12x12SrgbBlock";
  5108. case Format::eG8B8G8R8422Unorm : return "G8B8G8R8422Unorm";
  5109. case Format::eB8G8R8G8422Unorm : return "B8G8R8G8422Unorm";
  5110. case Format::eG8B8R83Plane420Unorm : return "G8B8R83Plane420Unorm";
  5111. case Format::eG8B8R82Plane420Unorm : return "G8B8R82Plane420Unorm";
  5112. case Format::eG8B8R83Plane422Unorm : return "G8B8R83Plane422Unorm";
  5113. case Format::eG8B8R82Plane422Unorm : return "G8B8R82Plane422Unorm";
  5114. case Format::eG8B8R83Plane444Unorm : return "G8B8R83Plane444Unorm";
  5115. case Format::eR10X6UnormPack16 : return "R10X6UnormPack16";
  5116. case Format::eR10X6G10X6Unorm2Pack16 : return "R10X6G10X6Unorm2Pack16";
  5117. case Format::eR10X6G10X6B10X6A10X6Unorm4Pack16 : return "R10X6G10X6B10X6A10X6Unorm4Pack16";
  5118. case Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16 : return "G10X6B10X6G10X6R10X6422Unorm4Pack16";
  5119. case Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16 : return "B10X6G10X6R10X6G10X6422Unorm4Pack16";
  5120. case Format::eG10X6B10X6R10X63Plane420Unorm3Pack16 : return "G10X6B10X6R10X63Plane420Unorm3Pack16";
  5121. case Format::eG10X6B10X6R10X62Plane420Unorm3Pack16 : return "G10X6B10X6R10X62Plane420Unorm3Pack16";
  5122. case Format::eG10X6B10X6R10X63Plane422Unorm3Pack16 : return "G10X6B10X6R10X63Plane422Unorm3Pack16";
  5123. case Format::eG10X6B10X6R10X62Plane422Unorm3Pack16 : return "G10X6B10X6R10X62Plane422Unorm3Pack16";
  5124. case Format::eG10X6B10X6R10X63Plane444Unorm3Pack16 : return "G10X6B10X6R10X63Plane444Unorm3Pack16";
  5125. case Format::eR12X4UnormPack16 : return "R12X4UnormPack16";
  5126. case Format::eR12X4G12X4Unorm2Pack16 : return "R12X4G12X4Unorm2Pack16";
  5127. case Format::eR12X4G12X4B12X4A12X4Unorm4Pack16 : return "R12X4G12X4B12X4A12X4Unorm4Pack16";
  5128. case Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16 : return "G12X4B12X4G12X4R12X4422Unorm4Pack16";
  5129. case Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16 : return "B12X4G12X4R12X4G12X4422Unorm4Pack16";
  5130. case Format::eG12X4B12X4R12X43Plane420Unorm3Pack16 : return "G12X4B12X4R12X43Plane420Unorm3Pack16";
  5131. case Format::eG12X4B12X4R12X42Plane420Unorm3Pack16 : return "G12X4B12X4R12X42Plane420Unorm3Pack16";
  5132. case Format::eG12X4B12X4R12X43Plane422Unorm3Pack16 : return "G12X4B12X4R12X43Plane422Unorm3Pack16";
  5133. case Format::eG12X4B12X4R12X42Plane422Unorm3Pack16 : return "G12X4B12X4R12X42Plane422Unorm3Pack16";
  5134. case Format::eG12X4B12X4R12X43Plane444Unorm3Pack16 : return "G12X4B12X4R12X43Plane444Unorm3Pack16";
  5135. case Format::eG16B16G16R16422Unorm : return "G16B16G16R16422Unorm";
  5136. case Format::eB16G16R16G16422Unorm : return "B16G16R16G16422Unorm";
  5137. case Format::eG16B16R163Plane420Unorm : return "G16B16R163Plane420Unorm";
  5138. case Format::eG16B16R162Plane420Unorm : return "G16B16R162Plane420Unorm";
  5139. case Format::eG16B16R163Plane422Unorm : return "G16B16R163Plane422Unorm";
  5140. case Format::eG16B16R162Plane422Unorm : return "G16B16R162Plane422Unorm";
  5141. case Format::eG16B16R163Plane444Unorm : return "G16B16R163Plane444Unorm";
  5142. case Format::ePvrtc12BppUnormBlockIMG : return "Pvrtc12BppUnormBlockIMG";
  5143. case Format::ePvrtc14BppUnormBlockIMG : return "Pvrtc14BppUnormBlockIMG";
  5144. case Format::ePvrtc22BppUnormBlockIMG : return "Pvrtc22BppUnormBlockIMG";
  5145. case Format::ePvrtc24BppUnormBlockIMG : return "Pvrtc24BppUnormBlockIMG";
  5146. case Format::ePvrtc12BppSrgbBlockIMG : return "Pvrtc12BppSrgbBlockIMG";
  5147. case Format::ePvrtc14BppSrgbBlockIMG : return "Pvrtc14BppSrgbBlockIMG";
  5148. case Format::ePvrtc22BppSrgbBlockIMG : return "Pvrtc22BppSrgbBlockIMG";
  5149. case Format::ePvrtc24BppSrgbBlockIMG : return "Pvrtc24BppSrgbBlockIMG";
  5150. case Format::eAstc4x4SfloatBlockEXT : return "Astc4x4SfloatBlockEXT";
  5151. case Format::eAstc5x4SfloatBlockEXT : return "Astc5x4SfloatBlockEXT";
  5152. case Format::eAstc5x5SfloatBlockEXT : return "Astc5x5SfloatBlockEXT";
  5153. case Format::eAstc6x5SfloatBlockEXT : return "Astc6x5SfloatBlockEXT";
  5154. case Format::eAstc6x6SfloatBlockEXT : return "Astc6x6SfloatBlockEXT";
  5155. case Format::eAstc8x5SfloatBlockEXT : return "Astc8x5SfloatBlockEXT";
  5156. case Format::eAstc8x6SfloatBlockEXT : return "Astc8x6SfloatBlockEXT";
  5157. case Format::eAstc8x8SfloatBlockEXT : return "Astc8x8SfloatBlockEXT";
  5158. case Format::eAstc10x5SfloatBlockEXT : return "Astc10x5SfloatBlockEXT";
  5159. case Format::eAstc10x6SfloatBlockEXT : return "Astc10x6SfloatBlockEXT";
  5160. case Format::eAstc10x8SfloatBlockEXT : return "Astc10x8SfloatBlockEXT";
  5161. case Format::eAstc10x10SfloatBlockEXT : return "Astc10x10SfloatBlockEXT";
  5162. case Format::eAstc12x10SfloatBlockEXT : return "Astc12x10SfloatBlockEXT";
  5163. case Format::eAstc12x12SfloatBlockEXT : return "Astc12x12SfloatBlockEXT";
  5164. case Format::eA4R4G4B4UnormPack16EXT : return "A4R4G4B4UnormPack16EXT";
  5165. case Format::eA4B4G4R4UnormPack16EXT : return "A4B4G4R4UnormPack16EXT";
  5166. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5167. }
  5168. }
  5169. enum class FormatFeatureFlagBits : VkFormatFeatureFlags
  5170. {
  5171. eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,
  5172. eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
  5173. eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,
  5174. eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,
  5175. eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,
  5176. eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
  5177. eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,
  5178. eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
  5179. eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,
  5180. eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
  5181. eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT,
  5182. eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT,
  5183. eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
  5184. eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
  5185. eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
  5186. eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
  5187. eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
  5188. eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
  5189. eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
  5190. eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
  5191. eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT,
  5192. eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
  5193. eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
  5194. eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
  5195. eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR,
  5196. eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT,
  5197. eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
  5198. eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR,
  5199. eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR,
  5200. eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR,
  5201. eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT,
  5202. eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT,
  5203. eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR,
  5204. eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR,
  5205. eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR,
  5206. eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR,
  5207. eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR,
  5208. eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR};
  5209. VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlagBits value )
  5210. {
  5211. switch ( value )
  5212. {
  5213. case FormatFeatureFlagBits::eSampledImage : return "SampledImage";
  5214. case FormatFeatureFlagBits::eStorageImage : return "StorageImage";
  5215. case FormatFeatureFlagBits::eStorageImageAtomic : return "StorageImageAtomic";
  5216. case FormatFeatureFlagBits::eUniformTexelBuffer : return "UniformTexelBuffer";
  5217. case FormatFeatureFlagBits::eStorageTexelBuffer : return "StorageTexelBuffer";
  5218. case FormatFeatureFlagBits::eStorageTexelBufferAtomic : return "StorageTexelBufferAtomic";
  5219. case FormatFeatureFlagBits::eVertexBuffer : return "VertexBuffer";
  5220. case FormatFeatureFlagBits::eColorAttachment : return "ColorAttachment";
  5221. case FormatFeatureFlagBits::eColorAttachmentBlend : return "ColorAttachmentBlend";
  5222. case FormatFeatureFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment";
  5223. case FormatFeatureFlagBits::eBlitSrc : return "BlitSrc";
  5224. case FormatFeatureFlagBits::eBlitDst : return "BlitDst";
  5225. case FormatFeatureFlagBits::eSampledImageFilterLinear : return "SampledImageFilterLinear";
  5226. case FormatFeatureFlagBits::eTransferSrc : return "TransferSrc";
  5227. case FormatFeatureFlagBits::eTransferDst : return "TransferDst";
  5228. case FormatFeatureFlagBits::eMidpointChromaSamples : return "MidpointChromaSamples";
  5229. case FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter : return "SampledImageYcbcrConversionLinearFilter";
  5230. case FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter : return "SampledImageYcbcrConversionSeparateReconstructionFilter";
  5231. case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit : return "SampledImageYcbcrConversionChromaReconstructionExplicit";
  5232. case FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable : return "SampledImageYcbcrConversionChromaReconstructionExplicitForceable";
  5233. case FormatFeatureFlagBits::eDisjoint : return "Disjoint";
  5234. case FormatFeatureFlagBits::eCositedChromaSamples : return "CositedChromaSamples";
  5235. case FormatFeatureFlagBits::eSampledImageFilterMinmax : return "SampledImageFilterMinmax";
  5236. case FormatFeatureFlagBits::eSampledImageFilterCubicIMG : return "SampledImageFilterCubicIMG";
  5237. case FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR : return "AccelerationStructureVertexBufferKHR";
  5238. case FormatFeatureFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT";
  5239. case FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR : return "FragmentShadingRateAttachmentKHR";
  5240. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5241. }
  5242. }
  5243. enum class FragmentShadingRateCombinerOpKHR
  5244. {
  5245. eKeep = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR,
  5246. eReplace = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR,
  5247. eMin = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR,
  5248. eMax = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR,
  5249. eMul = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR};
  5250. VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateCombinerOpKHR value )
  5251. {
  5252. switch ( value )
  5253. {
  5254. case FragmentShadingRateCombinerOpKHR::eKeep : return "Keep";
  5255. case FragmentShadingRateCombinerOpKHR::eReplace : return "Replace";
  5256. case FragmentShadingRateCombinerOpKHR::eMin : return "Min";
  5257. case FragmentShadingRateCombinerOpKHR::eMax : return "Max";
  5258. case FragmentShadingRateCombinerOpKHR::eMul : return "Mul";
  5259. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5260. }
  5261. }
  5262. enum class FragmentShadingRateNV
  5263. {
  5264. e1InvocationPerPixel = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV,
  5265. e1InvocationPer1X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV,
  5266. e1InvocationPer2X1Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV,
  5267. e1InvocationPer2X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV,
  5268. e1InvocationPer2X4Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV,
  5269. e1InvocationPer4X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV,
  5270. e1InvocationPer4X4Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV,
  5271. e2InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV,
  5272. e4InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV,
  5273. e8InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV,
  5274. e16InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV,
  5275. eNoInvocations = VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV};
  5276. VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateNV value )
  5277. {
  5278. switch ( value )
  5279. {
  5280. case FragmentShadingRateNV::e1InvocationPerPixel : return "1InvocationPerPixel";
  5281. case FragmentShadingRateNV::e1InvocationPer1X2Pixels : return "1InvocationPer1X2Pixels";
  5282. case FragmentShadingRateNV::e1InvocationPer2X1Pixels : return "1InvocationPer2X1Pixels";
  5283. case FragmentShadingRateNV::e1InvocationPer2X2Pixels : return "1InvocationPer2X2Pixels";
  5284. case FragmentShadingRateNV::e1InvocationPer2X4Pixels : return "1InvocationPer2X4Pixels";
  5285. case FragmentShadingRateNV::e1InvocationPer4X2Pixels : return "1InvocationPer4X2Pixels";
  5286. case FragmentShadingRateNV::e1InvocationPer4X4Pixels : return "1InvocationPer4X4Pixels";
  5287. case FragmentShadingRateNV::e2InvocationsPerPixel : return "2InvocationsPerPixel";
  5288. case FragmentShadingRateNV::e4InvocationsPerPixel : return "4InvocationsPerPixel";
  5289. case FragmentShadingRateNV::e8InvocationsPerPixel : return "8InvocationsPerPixel";
  5290. case FragmentShadingRateNV::e16InvocationsPerPixel : return "16InvocationsPerPixel";
  5291. case FragmentShadingRateNV::eNoInvocations : return "NoInvocations";
  5292. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5293. }
  5294. }
  5295. enum class FragmentShadingRateTypeNV
  5296. {
  5297. eFragmentSize = VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV,
  5298. eEnums = VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV};
  5299. VULKAN_HPP_INLINE std::string to_string( FragmentShadingRateTypeNV value )
  5300. {
  5301. switch ( value )
  5302. {
  5303. case FragmentShadingRateTypeNV::eFragmentSize : return "FragmentSize";
  5304. case FragmentShadingRateTypeNV::eEnums : return "Enums";
  5305. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5306. }
  5307. }
  5308. enum class FramebufferCreateFlagBits : VkFramebufferCreateFlags
  5309. {
  5310. eImageless = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT,
  5311. eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR};
  5312. VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlagBits value )
  5313. {
  5314. switch ( value )
  5315. {
  5316. case FramebufferCreateFlagBits::eImageless : return "Imageless";
  5317. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5318. }
  5319. }
  5320. enum class FrontFace
  5321. {
  5322. eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE,
  5323. eClockwise = VK_FRONT_FACE_CLOCKWISE};
  5324. VULKAN_HPP_INLINE std::string to_string( FrontFace value )
  5325. {
  5326. switch ( value )
  5327. {
  5328. case FrontFace::eCounterClockwise : return "CounterClockwise";
  5329. case FrontFace::eClockwise : return "Clockwise";
  5330. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5331. }
  5332. }
  5333. #ifdef VK_USE_PLATFORM_WIN32_KHR
  5334. enum class FullScreenExclusiveEXT
  5335. {
  5336. eDefault = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT,
  5337. eAllowed = VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT,
  5338. eDisallowed = VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT,
  5339. eApplicationControlled = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT};
  5340. VULKAN_HPP_INLINE std::string to_string( FullScreenExclusiveEXT value )
  5341. {
  5342. switch ( value )
  5343. {
  5344. case FullScreenExclusiveEXT::eDefault : return "Default";
  5345. case FullScreenExclusiveEXT::eAllowed : return "Allowed";
  5346. case FullScreenExclusiveEXT::eDisallowed : return "Disallowed";
  5347. case FullScreenExclusiveEXT::eApplicationControlled : return "ApplicationControlled";
  5348. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5349. }
  5350. }
  5351. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  5352. enum class GeometryFlagBitsKHR : VkGeometryFlagsKHR
  5353. {
  5354. eOpaque = VK_GEOMETRY_OPAQUE_BIT_KHR,
  5355. eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR};
  5356. using GeometryFlagBitsNV = GeometryFlagBitsKHR;
  5357. VULKAN_HPP_INLINE std::string to_string( GeometryFlagBitsKHR value )
  5358. {
  5359. switch ( value )
  5360. {
  5361. case GeometryFlagBitsKHR::eOpaque : return "Opaque";
  5362. case GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation : return "NoDuplicateAnyHitInvocation";
  5363. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5364. }
  5365. }
  5366. enum class GeometryInstanceFlagBitsKHR : VkGeometryInstanceFlagsKHR
  5367. {
  5368. eTriangleFacingCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR,
  5369. eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR,
  5370. eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR,
  5371. eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR,
  5372. eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV};
  5373. using GeometryInstanceFlagBitsNV = GeometryInstanceFlagBitsKHR;
  5374. VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagBitsKHR value )
  5375. {
  5376. switch ( value )
  5377. {
  5378. case GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable : return "TriangleFacingCullDisable";
  5379. case GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise : return "TriangleFrontCounterclockwise";
  5380. case GeometryInstanceFlagBitsKHR::eForceOpaque : return "ForceOpaque";
  5381. case GeometryInstanceFlagBitsKHR::eForceNoOpaque : return "ForceNoOpaque";
  5382. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5383. }
  5384. }
  5385. enum class GeometryTypeKHR
  5386. {
  5387. eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_KHR,
  5388. eAabbs = VK_GEOMETRY_TYPE_AABBS_KHR,
  5389. eInstances = VK_GEOMETRY_TYPE_INSTANCES_KHR};
  5390. using GeometryTypeNV = GeometryTypeKHR;
  5391. VULKAN_HPP_INLINE std::string to_string( GeometryTypeKHR value )
  5392. {
  5393. switch ( value )
  5394. {
  5395. case GeometryTypeKHR::eTriangles : return "Triangles";
  5396. case GeometryTypeKHR::eAabbs : return "Aabbs";
  5397. case GeometryTypeKHR::eInstances : return "Instances";
  5398. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5399. }
  5400. }
  5401. enum class ImageAspectFlagBits : VkImageAspectFlags
  5402. {
  5403. eColor = VK_IMAGE_ASPECT_COLOR_BIT,
  5404. eDepth = VK_IMAGE_ASPECT_DEPTH_BIT,
  5405. eStencil = VK_IMAGE_ASPECT_STENCIL_BIT,
  5406. eMetadata = VK_IMAGE_ASPECT_METADATA_BIT,
  5407. ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT,
  5408. ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT,
  5409. ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT,
  5410. eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT,
  5411. eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT,
  5412. eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT,
  5413. eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT,
  5414. ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR,
  5415. ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR,
  5416. ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR};
  5417. VULKAN_HPP_INLINE std::string to_string( ImageAspectFlagBits value )
  5418. {
  5419. switch ( value )
  5420. {
  5421. case ImageAspectFlagBits::eColor : return "Color";
  5422. case ImageAspectFlagBits::eDepth : return "Depth";
  5423. case ImageAspectFlagBits::eStencil : return "Stencil";
  5424. case ImageAspectFlagBits::eMetadata : return "Metadata";
  5425. case ImageAspectFlagBits::ePlane0 : return "Plane0";
  5426. case ImageAspectFlagBits::ePlane1 : return "Plane1";
  5427. case ImageAspectFlagBits::ePlane2 : return "Plane2";
  5428. case ImageAspectFlagBits::eMemoryPlane0EXT : return "MemoryPlane0EXT";
  5429. case ImageAspectFlagBits::eMemoryPlane1EXT : return "MemoryPlane1EXT";
  5430. case ImageAspectFlagBits::eMemoryPlane2EXT : return "MemoryPlane2EXT";
  5431. case ImageAspectFlagBits::eMemoryPlane3EXT : return "MemoryPlane3EXT";
  5432. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5433. }
  5434. }
  5435. enum class ImageCreateFlagBits : VkImageCreateFlags
  5436. {
  5437. eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
  5438. eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
  5439. eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,
  5440. eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
  5441. eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,
  5442. eAlias = VK_IMAGE_CREATE_ALIAS_BIT,
  5443. eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
  5444. e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
  5445. eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
  5446. eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
  5447. eProtected = VK_IMAGE_CREATE_PROTECTED_BIT,
  5448. eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT,
  5449. eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV,
  5450. eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT,
  5451. eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT,
  5452. e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
  5453. eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR,
  5454. eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR,
  5455. eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR,
  5456. eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
  5457. eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR};
  5458. VULKAN_HPP_INLINE std::string to_string( ImageCreateFlagBits value )
  5459. {
  5460. switch ( value )
  5461. {
  5462. case ImageCreateFlagBits::eSparseBinding : return "SparseBinding";
  5463. case ImageCreateFlagBits::eSparseResidency : return "SparseResidency";
  5464. case ImageCreateFlagBits::eSparseAliased : return "SparseAliased";
  5465. case ImageCreateFlagBits::eMutableFormat : return "MutableFormat";
  5466. case ImageCreateFlagBits::eCubeCompatible : return "CubeCompatible";
  5467. case ImageCreateFlagBits::eAlias : return "Alias";
  5468. case ImageCreateFlagBits::eSplitInstanceBindRegions : return "SplitInstanceBindRegions";
  5469. case ImageCreateFlagBits::e2DArrayCompatible : return "2DArrayCompatible";
  5470. case ImageCreateFlagBits::eBlockTexelViewCompatible : return "BlockTexelViewCompatible";
  5471. case ImageCreateFlagBits::eExtendedUsage : return "ExtendedUsage";
  5472. case ImageCreateFlagBits::eProtected : return "Protected";
  5473. case ImageCreateFlagBits::eDisjoint : return "Disjoint";
  5474. case ImageCreateFlagBits::eCornerSampledNV : return "CornerSampledNV";
  5475. case ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT : return "SampleLocationsCompatibleDepthEXT";
  5476. case ImageCreateFlagBits::eSubsampledEXT : return "SubsampledEXT";
  5477. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5478. }
  5479. }
  5480. enum class ImageLayout
  5481. {
  5482. eUndefined = VK_IMAGE_LAYOUT_UNDEFINED,
  5483. eGeneral = VK_IMAGE_LAYOUT_GENERAL,
  5484. eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
  5485. eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
  5486. eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
  5487. eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
  5488. eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
  5489. eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
  5490. ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED,
  5491. eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
  5492. eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
  5493. eDepthAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
  5494. eDepthReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL,
  5495. eStencilAttachmentOptimal = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL,
  5496. eStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL,
  5497. ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
  5498. eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
  5499. eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV,
  5500. eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT,
  5501. eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR,
  5502. eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR,
  5503. eDepthReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR,
  5504. eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR,
  5505. eFragmentShadingRateAttachmentOptimalKHR = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR,
  5506. eStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR,
  5507. eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR};
  5508. VULKAN_HPP_INLINE std::string to_string( ImageLayout value )
  5509. {
  5510. switch ( value )
  5511. {
  5512. case ImageLayout::eUndefined : return "Undefined";
  5513. case ImageLayout::eGeneral : return "General";
  5514. case ImageLayout::eColorAttachmentOptimal : return "ColorAttachmentOptimal";
  5515. case ImageLayout::eDepthStencilAttachmentOptimal : return "DepthStencilAttachmentOptimal";
  5516. case ImageLayout::eDepthStencilReadOnlyOptimal : return "DepthStencilReadOnlyOptimal";
  5517. case ImageLayout::eShaderReadOnlyOptimal : return "ShaderReadOnlyOptimal";
  5518. case ImageLayout::eTransferSrcOptimal : return "TransferSrcOptimal";
  5519. case ImageLayout::eTransferDstOptimal : return "TransferDstOptimal";
  5520. case ImageLayout::ePreinitialized : return "Preinitialized";
  5521. case ImageLayout::eDepthReadOnlyStencilAttachmentOptimal : return "DepthReadOnlyStencilAttachmentOptimal";
  5522. case ImageLayout::eDepthAttachmentStencilReadOnlyOptimal : return "DepthAttachmentStencilReadOnlyOptimal";
  5523. case ImageLayout::eDepthAttachmentOptimal : return "DepthAttachmentOptimal";
  5524. case ImageLayout::eDepthReadOnlyOptimal : return "DepthReadOnlyOptimal";
  5525. case ImageLayout::eStencilAttachmentOptimal : return "StencilAttachmentOptimal";
  5526. case ImageLayout::eStencilReadOnlyOptimal : return "StencilReadOnlyOptimal";
  5527. case ImageLayout::ePresentSrcKHR : return "PresentSrcKHR";
  5528. case ImageLayout::eSharedPresentKHR : return "SharedPresentKHR";
  5529. case ImageLayout::eShadingRateOptimalNV : return "ShadingRateOptimalNV";
  5530. case ImageLayout::eFragmentDensityMapOptimalEXT : return "FragmentDensityMapOptimalEXT";
  5531. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5532. }
  5533. }
  5534. enum class ImageTiling
  5535. {
  5536. eOptimal = VK_IMAGE_TILING_OPTIMAL,
  5537. eLinear = VK_IMAGE_TILING_LINEAR,
  5538. eDrmFormatModifierEXT = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT};
  5539. VULKAN_HPP_INLINE std::string to_string( ImageTiling value )
  5540. {
  5541. switch ( value )
  5542. {
  5543. case ImageTiling::eOptimal : return "Optimal";
  5544. case ImageTiling::eLinear : return "Linear";
  5545. case ImageTiling::eDrmFormatModifierEXT : return "DrmFormatModifierEXT";
  5546. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5547. }
  5548. }
  5549. enum class ImageType
  5550. {
  5551. e1D = VK_IMAGE_TYPE_1D,
  5552. e2D = VK_IMAGE_TYPE_2D,
  5553. e3D = VK_IMAGE_TYPE_3D};
  5554. VULKAN_HPP_INLINE std::string to_string( ImageType value )
  5555. {
  5556. switch ( value )
  5557. {
  5558. case ImageType::e1D : return "1D";
  5559. case ImageType::e2D : return "2D";
  5560. case ImageType::e3D : return "3D";
  5561. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5562. }
  5563. }
  5564. enum class ImageUsageFlagBits : VkImageUsageFlags
  5565. {
  5566. eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
  5567. eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT,
  5568. eSampled = VK_IMAGE_USAGE_SAMPLED_BIT,
  5569. eStorage = VK_IMAGE_USAGE_STORAGE_BIT,
  5570. eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
  5571. eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
  5572. eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT,
  5573. eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
  5574. eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV,
  5575. eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT,
  5576. eFragmentShadingRateAttachmentKHR = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR};
  5577. VULKAN_HPP_INLINE std::string to_string( ImageUsageFlagBits value )
  5578. {
  5579. switch ( value )
  5580. {
  5581. case ImageUsageFlagBits::eTransferSrc : return "TransferSrc";
  5582. case ImageUsageFlagBits::eTransferDst : return "TransferDst";
  5583. case ImageUsageFlagBits::eSampled : return "Sampled";
  5584. case ImageUsageFlagBits::eStorage : return "Storage";
  5585. case ImageUsageFlagBits::eColorAttachment : return "ColorAttachment";
  5586. case ImageUsageFlagBits::eDepthStencilAttachment : return "DepthStencilAttachment";
  5587. case ImageUsageFlagBits::eTransientAttachment : return "TransientAttachment";
  5588. case ImageUsageFlagBits::eInputAttachment : return "InputAttachment";
  5589. case ImageUsageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV";
  5590. case ImageUsageFlagBits::eFragmentDensityMapEXT : return "FragmentDensityMapEXT";
  5591. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5592. }
  5593. }
  5594. enum class ImageViewCreateFlagBits : VkImageViewCreateFlags
  5595. {
  5596. eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT,
  5597. eFragmentDensityMapDeferredEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT};
  5598. VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlagBits value )
  5599. {
  5600. switch ( value )
  5601. {
  5602. case ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT : return "FragmentDensityMapDynamicEXT";
  5603. case ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT : return "FragmentDensityMapDeferredEXT";
  5604. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5605. }
  5606. }
  5607. enum class ImageViewType
  5608. {
  5609. e1D = VK_IMAGE_VIEW_TYPE_1D,
  5610. e2D = VK_IMAGE_VIEW_TYPE_2D,
  5611. e3D = VK_IMAGE_VIEW_TYPE_3D,
  5612. eCube = VK_IMAGE_VIEW_TYPE_CUBE,
  5613. e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY,
  5614. e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY,
  5615. eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY};
  5616. VULKAN_HPP_INLINE std::string to_string( ImageViewType value )
  5617. {
  5618. switch ( value )
  5619. {
  5620. case ImageViewType::e1D : return "1D";
  5621. case ImageViewType::e2D : return "2D";
  5622. case ImageViewType::e3D : return "3D";
  5623. case ImageViewType::eCube : return "Cube";
  5624. case ImageViewType::e1DArray : return "1DArray";
  5625. case ImageViewType::e2DArray : return "2DArray";
  5626. case ImageViewType::eCubeArray : return "CubeArray";
  5627. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5628. }
  5629. }
  5630. enum class IndexType
  5631. {
  5632. eUint16 = VK_INDEX_TYPE_UINT16,
  5633. eUint32 = VK_INDEX_TYPE_UINT32,
  5634. eNoneKHR = VK_INDEX_TYPE_NONE_KHR,
  5635. eUint8EXT = VK_INDEX_TYPE_UINT8_EXT,
  5636. eNoneNV = VK_INDEX_TYPE_NONE_NV};
  5637. VULKAN_HPP_INLINE std::string to_string( IndexType value )
  5638. {
  5639. switch ( value )
  5640. {
  5641. case IndexType::eUint16 : return "Uint16";
  5642. case IndexType::eUint32 : return "Uint32";
  5643. case IndexType::eNoneKHR : return "NoneKHR";
  5644. case IndexType::eUint8EXT : return "Uint8EXT";
  5645. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5646. }
  5647. }
  5648. enum class IndirectCommandsLayoutUsageFlagBitsNV : VkIndirectCommandsLayoutUsageFlagsNV
  5649. {
  5650. eExplicitPreprocess = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV,
  5651. eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV,
  5652. eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV};
  5653. VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagBitsNV value )
  5654. {
  5655. switch ( value )
  5656. {
  5657. case IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess : return "ExplicitPreprocess";
  5658. case IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences : return "IndexedSequences";
  5659. case IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences : return "UnorderedSequences";
  5660. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5661. }
  5662. }
  5663. enum class IndirectCommandsTokenTypeNV
  5664. {
  5665. eShaderGroup = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV,
  5666. eStateFlags = VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV,
  5667. eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV,
  5668. eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV,
  5669. ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV,
  5670. eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV,
  5671. eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV,
  5672. eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV};
  5673. VULKAN_HPP_INLINE std::string to_string( IndirectCommandsTokenTypeNV value )
  5674. {
  5675. switch ( value )
  5676. {
  5677. case IndirectCommandsTokenTypeNV::eShaderGroup : return "ShaderGroup";
  5678. case IndirectCommandsTokenTypeNV::eStateFlags : return "StateFlags";
  5679. case IndirectCommandsTokenTypeNV::eIndexBuffer : return "IndexBuffer";
  5680. case IndirectCommandsTokenTypeNV::eVertexBuffer : return "VertexBuffer";
  5681. case IndirectCommandsTokenTypeNV::ePushConstant : return "PushConstant";
  5682. case IndirectCommandsTokenTypeNV::eDrawIndexed : return "DrawIndexed";
  5683. case IndirectCommandsTokenTypeNV::eDraw : return "Draw";
  5684. case IndirectCommandsTokenTypeNV::eDrawTasks : return "DrawTasks";
  5685. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5686. }
  5687. }
  5688. enum class IndirectStateFlagBitsNV : VkIndirectStateFlagsNV
  5689. {
  5690. eFlagFrontface = VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV};
  5691. VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagBitsNV value )
  5692. {
  5693. switch ( value )
  5694. {
  5695. case IndirectStateFlagBitsNV::eFlagFrontface : return "FlagFrontface";
  5696. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5697. }
  5698. }
  5699. enum class InstanceCreateFlagBits
  5700. {};
  5701. VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlagBits )
  5702. {
  5703. return "(void)";
  5704. }
  5705. enum class InternalAllocationType
  5706. {
  5707. eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE};
  5708. VULKAN_HPP_INLINE std::string to_string( InternalAllocationType value )
  5709. {
  5710. switch ( value )
  5711. {
  5712. case InternalAllocationType::eExecutable : return "Executable";
  5713. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5714. }
  5715. }
  5716. enum class LineRasterizationModeEXT
  5717. {
  5718. eDefault = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT,
  5719. eRectangular = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT,
  5720. eBresenham = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT,
  5721. eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT};
  5722. VULKAN_HPP_INLINE std::string to_string( LineRasterizationModeEXT value )
  5723. {
  5724. switch ( value )
  5725. {
  5726. case LineRasterizationModeEXT::eDefault : return "Default";
  5727. case LineRasterizationModeEXT::eRectangular : return "Rectangular";
  5728. case LineRasterizationModeEXT::eBresenham : return "Bresenham";
  5729. case LineRasterizationModeEXT::eRectangularSmooth : return "RectangularSmooth";
  5730. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5731. }
  5732. }
  5733. enum class LogicOp
  5734. {
  5735. eClear = VK_LOGIC_OP_CLEAR,
  5736. eAnd = VK_LOGIC_OP_AND,
  5737. eAndReverse = VK_LOGIC_OP_AND_REVERSE,
  5738. eCopy = VK_LOGIC_OP_COPY,
  5739. eAndInverted = VK_LOGIC_OP_AND_INVERTED,
  5740. eNoOp = VK_LOGIC_OP_NO_OP,
  5741. eXor = VK_LOGIC_OP_XOR,
  5742. eOr = VK_LOGIC_OP_OR,
  5743. eNor = VK_LOGIC_OP_NOR,
  5744. eEquivalent = VK_LOGIC_OP_EQUIVALENT,
  5745. eInvert = VK_LOGIC_OP_INVERT,
  5746. eOrReverse = VK_LOGIC_OP_OR_REVERSE,
  5747. eCopyInverted = VK_LOGIC_OP_COPY_INVERTED,
  5748. eOrInverted = VK_LOGIC_OP_OR_INVERTED,
  5749. eNand = VK_LOGIC_OP_NAND,
  5750. eSet = VK_LOGIC_OP_SET};
  5751. VULKAN_HPP_INLINE std::string to_string( LogicOp value )
  5752. {
  5753. switch ( value )
  5754. {
  5755. case LogicOp::eClear : return "Clear";
  5756. case LogicOp::eAnd : return "And";
  5757. case LogicOp::eAndReverse : return "AndReverse";
  5758. case LogicOp::eCopy : return "Copy";
  5759. case LogicOp::eAndInverted : return "AndInverted";
  5760. case LogicOp::eNoOp : return "NoOp";
  5761. case LogicOp::eXor : return "Xor";
  5762. case LogicOp::eOr : return "Or";
  5763. case LogicOp::eNor : return "Nor";
  5764. case LogicOp::eEquivalent : return "Equivalent";
  5765. case LogicOp::eInvert : return "Invert";
  5766. case LogicOp::eOrReverse : return "OrReverse";
  5767. case LogicOp::eCopyInverted : return "CopyInverted";
  5768. case LogicOp::eOrInverted : return "OrInverted";
  5769. case LogicOp::eNand : return "Nand";
  5770. case LogicOp::eSet : return "Set";
  5771. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5772. }
  5773. }
  5774. enum class MemoryAllocateFlagBits : VkMemoryAllocateFlags
  5775. {
  5776. eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT,
  5777. eDeviceAddress = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT,
  5778. eDeviceAddressCaptureReplay = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT};
  5779. using MemoryAllocateFlagBitsKHR = MemoryAllocateFlagBits;
  5780. VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlagBits value )
  5781. {
  5782. switch ( value )
  5783. {
  5784. case MemoryAllocateFlagBits::eDeviceMask : return "DeviceMask";
  5785. case MemoryAllocateFlagBits::eDeviceAddress : return "DeviceAddress";
  5786. case MemoryAllocateFlagBits::eDeviceAddressCaptureReplay : return "DeviceAddressCaptureReplay";
  5787. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5788. }
  5789. }
  5790. enum class MemoryHeapFlagBits : VkMemoryHeapFlags
  5791. {
  5792. eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
  5793. eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
  5794. eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR};
  5795. VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlagBits value )
  5796. {
  5797. switch ( value )
  5798. {
  5799. case MemoryHeapFlagBits::eDeviceLocal : return "DeviceLocal";
  5800. case MemoryHeapFlagBits::eMultiInstance : return "MultiInstance";
  5801. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5802. }
  5803. }
  5804. enum class MemoryOverallocationBehaviorAMD
  5805. {
  5806. eDefault = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD,
  5807. eAllowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD,
  5808. eDisallowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD};
  5809. VULKAN_HPP_INLINE std::string to_string( MemoryOverallocationBehaviorAMD value )
  5810. {
  5811. switch ( value )
  5812. {
  5813. case MemoryOverallocationBehaviorAMD::eDefault : return "Default";
  5814. case MemoryOverallocationBehaviorAMD::eAllowed : return "Allowed";
  5815. case MemoryOverallocationBehaviorAMD::eDisallowed : return "Disallowed";
  5816. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5817. }
  5818. }
  5819. enum class MemoryPropertyFlagBits : VkMemoryPropertyFlags
  5820. {
  5821. eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
  5822. eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
  5823. eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
  5824. eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
  5825. eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT,
  5826. eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT,
  5827. eDeviceCoherentAMD = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD,
  5828. eDeviceUncachedAMD = VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD};
  5829. VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlagBits value )
  5830. {
  5831. switch ( value )
  5832. {
  5833. case MemoryPropertyFlagBits::eDeviceLocal : return "DeviceLocal";
  5834. case MemoryPropertyFlagBits::eHostVisible : return "HostVisible";
  5835. case MemoryPropertyFlagBits::eHostCoherent : return "HostCoherent";
  5836. case MemoryPropertyFlagBits::eHostCached : return "HostCached";
  5837. case MemoryPropertyFlagBits::eLazilyAllocated : return "LazilyAllocated";
  5838. case MemoryPropertyFlagBits::eProtected : return "Protected";
  5839. case MemoryPropertyFlagBits::eDeviceCoherentAMD : return "DeviceCoherentAMD";
  5840. case MemoryPropertyFlagBits::eDeviceUncachedAMD : return "DeviceUncachedAMD";
  5841. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5842. }
  5843. }
  5844. enum class ObjectType
  5845. {
  5846. eUnknown = VK_OBJECT_TYPE_UNKNOWN,
  5847. eInstance = VK_OBJECT_TYPE_INSTANCE,
  5848. ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE,
  5849. eDevice = VK_OBJECT_TYPE_DEVICE,
  5850. eQueue = VK_OBJECT_TYPE_QUEUE,
  5851. eSemaphore = VK_OBJECT_TYPE_SEMAPHORE,
  5852. eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER,
  5853. eFence = VK_OBJECT_TYPE_FENCE,
  5854. eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY,
  5855. eBuffer = VK_OBJECT_TYPE_BUFFER,
  5856. eImage = VK_OBJECT_TYPE_IMAGE,
  5857. eEvent = VK_OBJECT_TYPE_EVENT,
  5858. eQueryPool = VK_OBJECT_TYPE_QUERY_POOL,
  5859. eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW,
  5860. eImageView = VK_OBJECT_TYPE_IMAGE_VIEW,
  5861. eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE,
  5862. ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE,
  5863. ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT,
  5864. eRenderPass = VK_OBJECT_TYPE_RENDER_PASS,
  5865. ePipeline = VK_OBJECT_TYPE_PIPELINE,
  5866. eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT,
  5867. eSampler = VK_OBJECT_TYPE_SAMPLER,
  5868. eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL,
  5869. eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET,
  5870. eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER,
  5871. eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL,
  5872. eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
  5873. eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
  5874. eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR,
  5875. eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR,
  5876. eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR,
  5877. eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR,
  5878. eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT,
  5879. eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT,
  5880. eAccelerationStructureKHR = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR,
  5881. eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT,
  5882. eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV,
  5883. ePerformanceConfigurationINTEL = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL,
  5884. eDeferredOperationKHR = VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR,
  5885. eIndirectCommandsLayoutNV = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV,
  5886. ePrivateDataSlotEXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT,
  5887. eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR,
  5888. eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR};
  5889. VULKAN_HPP_INLINE std::string to_string( ObjectType value )
  5890. {
  5891. switch ( value )
  5892. {
  5893. case ObjectType::eUnknown : return "Unknown";
  5894. case ObjectType::eInstance : return "Instance";
  5895. case ObjectType::ePhysicalDevice : return "PhysicalDevice";
  5896. case ObjectType::eDevice : return "Device";
  5897. case ObjectType::eQueue : return "Queue";
  5898. case ObjectType::eSemaphore : return "Semaphore";
  5899. case ObjectType::eCommandBuffer : return "CommandBuffer";
  5900. case ObjectType::eFence : return "Fence";
  5901. case ObjectType::eDeviceMemory : return "DeviceMemory";
  5902. case ObjectType::eBuffer : return "Buffer";
  5903. case ObjectType::eImage : return "Image";
  5904. case ObjectType::eEvent : return "Event";
  5905. case ObjectType::eQueryPool : return "QueryPool";
  5906. case ObjectType::eBufferView : return "BufferView";
  5907. case ObjectType::eImageView : return "ImageView";
  5908. case ObjectType::eShaderModule : return "ShaderModule";
  5909. case ObjectType::ePipelineCache : return "PipelineCache";
  5910. case ObjectType::ePipelineLayout : return "PipelineLayout";
  5911. case ObjectType::eRenderPass : return "RenderPass";
  5912. case ObjectType::ePipeline : return "Pipeline";
  5913. case ObjectType::eDescriptorSetLayout : return "DescriptorSetLayout";
  5914. case ObjectType::eSampler : return "Sampler";
  5915. case ObjectType::eDescriptorPool : return "DescriptorPool";
  5916. case ObjectType::eDescriptorSet : return "DescriptorSet";
  5917. case ObjectType::eFramebuffer : return "Framebuffer";
  5918. case ObjectType::eCommandPool : return "CommandPool";
  5919. case ObjectType::eSamplerYcbcrConversion : return "SamplerYcbcrConversion";
  5920. case ObjectType::eDescriptorUpdateTemplate : return "DescriptorUpdateTemplate";
  5921. case ObjectType::eSurfaceKHR : return "SurfaceKHR";
  5922. case ObjectType::eSwapchainKHR : return "SwapchainKHR";
  5923. case ObjectType::eDisplayKHR : return "DisplayKHR";
  5924. case ObjectType::eDisplayModeKHR : return "DisplayModeKHR";
  5925. case ObjectType::eDebugReportCallbackEXT : return "DebugReportCallbackEXT";
  5926. case ObjectType::eDebugUtilsMessengerEXT : return "DebugUtilsMessengerEXT";
  5927. case ObjectType::eAccelerationStructureKHR : return "AccelerationStructureKHR";
  5928. case ObjectType::eValidationCacheEXT : return "ValidationCacheEXT";
  5929. case ObjectType::eAccelerationStructureNV : return "AccelerationStructureNV";
  5930. case ObjectType::ePerformanceConfigurationINTEL : return "PerformanceConfigurationINTEL";
  5931. case ObjectType::eDeferredOperationKHR : return "DeferredOperationKHR";
  5932. case ObjectType::eIndirectCommandsLayoutNV : return "IndirectCommandsLayoutNV";
  5933. case ObjectType::ePrivateDataSlotEXT : return "PrivateDataSlotEXT";
  5934. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5935. }
  5936. }
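// Illustrative sketch (not part of the generated header): ObjectType identifies the kind of
// handle in structures such as DebugUtilsObjectNameInfoEXT from VK_EXT_debug_utils. Assumes the
// default `vk` namespace, pre-existing `device` and `buffer` handles, and that the extension
// (and a dispatcher that loads it) is available.
//
//   vk::DebugUtilsObjectNameInfoEXT nameInfo(
//     vk::ObjectType::eBuffer,
//     uint64_t( static_cast<VkBuffer>( buffer ) ),  // raw handle as uint64_t
//     "vertex buffer" );
//   device.setDebugUtilsObjectNameEXT( nameInfo );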
  5937. template<ObjectType value>
  5938. struct cpp_type
  5939. {};
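// Note (not part of the generated header): this primary template is specialized later in the
// header so that an ObjectType value can be mapped to the corresponding C++ handle class,
// e.g. `typename vk::cpp_type<vk::ObjectType::eDevice>::type` is presumably vk::Device,
// which lets generic utilities translate runtime object types into static types.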
  5940. enum class PeerMemoryFeatureFlagBits : VkPeerMemoryFeatureFlags
  5941. {
  5942. eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT,
  5943. eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT,
  5944. eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT,
  5945. eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT};
  5946. using PeerMemoryFeatureFlagBitsKHR = PeerMemoryFeatureFlagBits;
  5947. VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlagBits value )
  5948. {
  5949. switch ( value )
  5950. {
  5951. case PeerMemoryFeatureFlagBits::eCopySrc : return "CopySrc";
  5952. case PeerMemoryFeatureFlagBits::eCopyDst : return "CopyDst";
  5953. case PeerMemoryFeatureFlagBits::eGenericSrc : return "GenericSrc";
  5954. case PeerMemoryFeatureFlagBits::eGenericDst : return "GenericDst";
  5955. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5956. }
  5957. }
  5958. enum class PerformanceConfigurationTypeINTEL
  5959. {
  5960. eCommandQueueMetricsDiscoveryActivated = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL};
  5961. VULKAN_HPP_INLINE std::string to_string( PerformanceConfigurationTypeINTEL value )
  5962. {
  5963. switch ( value )
  5964. {
  5965. case PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated : return "CommandQueueMetricsDiscoveryActivated";
  5966. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5967. }
  5968. }
  5969. enum class PerformanceCounterDescriptionFlagBitsKHR : VkPerformanceCounterDescriptionFlagsKHR
  5970. {
  5971. ePerformanceImpacting = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR,
  5972. eConcurrentlyImpacted = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR};
  5973. VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagBitsKHR value )
  5974. {
  5975. switch ( value )
  5976. {
  5977. case PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting : return "PerformanceImpacting";
  5978. case PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted : return "ConcurrentlyImpacted";
  5979. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5980. }
  5981. }
  5982. enum class PerformanceCounterScopeKHR
  5983. {
  5984. eCommandBuffer = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR,
  5985. eRenderPass = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR,
  5986. eCommand = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR,
  5987. eVkQueryScopeCommandBuffer = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR,
  5988. eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR,
  5989. eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR};
  5990. VULKAN_HPP_INLINE std::string to_string( PerformanceCounterScopeKHR value )
  5991. {
  5992. switch ( value )
  5993. {
  5994. case PerformanceCounterScopeKHR::eCommandBuffer : return "CommandBuffer";
  5995. case PerformanceCounterScopeKHR::eRenderPass : return "RenderPass";
  5996. case PerformanceCounterScopeKHR::eCommand : return "Command";
  5997. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  5998. }
  5999. }
  6000. enum class PerformanceCounterStorageKHR
  6001. {
  6002. eInt32 = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR,
  6003. eInt64 = VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR,
  6004. eUint32 = VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR,
  6005. eUint64 = VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR,
  6006. eFloat32 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR,
  6007. eFloat64 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR};
  6008. VULKAN_HPP_INLINE std::string to_string( PerformanceCounterStorageKHR value )
  6009. {
  6010. switch ( value )
  6011. {
  6012. case PerformanceCounterStorageKHR::eInt32 : return "Int32";
  6013. case PerformanceCounterStorageKHR::eInt64 : return "Int64";
  6014. case PerformanceCounterStorageKHR::eUint32 : return "Uint32";
  6015. case PerformanceCounterStorageKHR::eUint64 : return "Uint64";
  6016. case PerformanceCounterStorageKHR::eFloat32 : return "Float32";
  6017. case PerformanceCounterStorageKHR::eFloat64 : return "Float64";
  6018. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6019. }
  6020. }
  6021. enum class PerformanceCounterUnitKHR
  6022. {
  6023. eGeneric = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR,
  6024. ePercentage = VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR,
  6025. eNanoseconds = VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR,
  6026. eBytes = VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR,
  6027. eBytesPerSecond = VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR,
  6028. eKelvin = VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR,
  6029. eWatts = VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR,
  6030. eVolts = VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR,
  6031. eAmps = VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR,
  6032. eHertz = VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR,
  6033. eCycles = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR};
  6034. VULKAN_HPP_INLINE std::string to_string( PerformanceCounterUnitKHR value )
  6035. {
  6036. switch ( value )
  6037. {
  6038. case PerformanceCounterUnitKHR::eGeneric : return "Generic";
  6039. case PerformanceCounterUnitKHR::ePercentage : return "Percentage";
  6040. case PerformanceCounterUnitKHR::eNanoseconds : return "Nanoseconds";
  6041. case PerformanceCounterUnitKHR::eBytes : return "Bytes";
  6042. case PerformanceCounterUnitKHR::eBytesPerSecond : return "BytesPerSecond";
  6043. case PerformanceCounterUnitKHR::eKelvin : return "Kelvin";
  6044. case PerformanceCounterUnitKHR::eWatts : return "Watts";
  6045. case PerformanceCounterUnitKHR::eVolts : return "Volts";
  6046. case PerformanceCounterUnitKHR::eAmps : return "Amps";
  6047. case PerformanceCounterUnitKHR::eHertz : return "Hertz";
  6048. case PerformanceCounterUnitKHR::eCycles : return "Cycles";
  6049. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6050. }
  6051. }
  6052. enum class PerformanceOverrideTypeINTEL
  6053. {
  6054. eNullHardware = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL,
  6055. eFlushGpuCaches = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL};
  6056. VULKAN_HPP_INLINE std::string to_string( PerformanceOverrideTypeINTEL value )
  6057. {
  6058. switch ( value )
  6059. {
  6060. case PerformanceOverrideTypeINTEL::eNullHardware : return "NullHardware";
  6061. case PerformanceOverrideTypeINTEL::eFlushGpuCaches : return "FlushGpuCaches";
  6062. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6063. }
  6064. }
  6065. enum class PerformanceParameterTypeINTEL
  6066. {
  6067. eHwCountersSupported = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL,
  6068. eStreamMarkerValidBits = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL};
  6069. VULKAN_HPP_INLINE std::string to_string( PerformanceParameterTypeINTEL value )
  6070. {
  6071. switch ( value )
  6072. {
  6073. case PerformanceParameterTypeINTEL::eHwCountersSupported : return "HwCountersSupported";
  6074. case PerformanceParameterTypeINTEL::eStreamMarkerValidBits : return "StreamMarkerValidBits";
  6075. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6076. }
  6077. }
  6078. enum class PerformanceValueTypeINTEL
  6079. {
  6080. eUint32 = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL,
  6081. eUint64 = VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL,
  6082. eFloat = VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL,
  6083. eBool = VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL,
  6084. eString = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL};
  6085. VULKAN_HPP_INLINE std::string to_string( PerformanceValueTypeINTEL value )
  6086. {
  6087. switch ( value )
  6088. {
  6089. case PerformanceValueTypeINTEL::eUint32 : return "Uint32";
  6090. case PerformanceValueTypeINTEL::eUint64 : return "Uint64";
  6091. case PerformanceValueTypeINTEL::eFloat : return "Float";
  6092. case PerformanceValueTypeINTEL::eBool : return "Bool";
  6093. case PerformanceValueTypeINTEL::eString : return "String";
  6094. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6095. }
  6096. }
  6097. enum class PhysicalDeviceType
  6098. {
  6099. eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER,
  6100. eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
  6101. eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU,
  6102. eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU,
  6103. eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU};
  6104. VULKAN_HPP_INLINE std::string to_string( PhysicalDeviceType value )
  6105. {
  6106. switch ( value )
  6107. {
  6108. case PhysicalDeviceType::eOther : return "Other";
  6109. case PhysicalDeviceType::eIntegratedGpu : return "IntegratedGpu";
  6110. case PhysicalDeviceType::eDiscreteGpu : return "DiscreteGpu";
  6111. case PhysicalDeviceType::eVirtualGpu : return "VirtualGpu";
  6112. case PhysicalDeviceType::eCpu : return "Cpu";
  6113. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6114. }
  6115. }
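// Illustrative sketch (not part of the generated header): PhysicalDeviceType is commonly used to
// prefer a discrete GPU when several devices are enumerated. Assumes the default `vk` namespace
// and a pre-existing `vk::Instance instance`.
//
//   std::vector<vk::PhysicalDevice> devices = instance.enumeratePhysicalDevices();
//   vk::PhysicalDevice chosen = devices.front();
//   for ( vk::PhysicalDevice const & pd : devices )
//   {
//     if ( pd.getProperties().deviceType == vk::PhysicalDeviceType::eDiscreteGpu )
//     {
//       chosen = pd;
//       break;
//     }
//   }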
  6116. enum class PipelineBindPoint
  6117. {
  6118. eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS,
  6119. eCompute = VK_PIPELINE_BIND_POINT_COMPUTE,
  6120. eRayTracingKHR = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR,
  6121. eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV};
  6122. VULKAN_HPP_INLINE std::string to_string( PipelineBindPoint value )
  6123. {
  6124. switch ( value )
  6125. {
  6126. case PipelineBindPoint::eGraphics : return "Graphics";
  6127. case PipelineBindPoint::eCompute : return "Compute";
  6128. case PipelineBindPoint::eRayTracingKHR : return "RayTracingKHR";
  6129. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6130. }
  6131. }
  6132. enum class PipelineCacheCreateFlagBits : VkPipelineCacheCreateFlags
  6133. {
  6134. eExternallySynchronizedEXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT};
  6135. VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlagBits value )
  6136. {
  6137. switch ( value )
  6138. {
  6139. case PipelineCacheCreateFlagBits::eExternallySynchronizedEXT : return "ExternallySynchronizedEXT";
  6140. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6141. }
  6142. }
  6143. enum class PipelineCacheHeaderVersion
  6144. {
  6145. eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE};
  6146. VULKAN_HPP_INLINE std::string to_string( PipelineCacheHeaderVersion value )
  6147. {
  6148. switch ( value )
  6149. {
  6150. case PipelineCacheHeaderVersion::eOne : return "One";
  6151. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6152. }
  6153. }
  6154. enum class PipelineCompilerControlFlagBitsAMD : VkPipelineCompilerControlFlagsAMD
  6155. {};
  6156. VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagBitsAMD )
  6157. {
  6158. return "(void)";
  6159. }
  6160. enum class PipelineCreateFlagBits : VkPipelineCreateFlags
  6161. {
  6162. eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
  6163. eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,
  6164. eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT,
  6165. eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
  6166. eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
  6167. eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR,
  6168. eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR,
  6169. eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR,
  6170. eRayTracingNoNullIntersectionShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR,
  6171. eRayTracingSkipTrianglesKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR,
  6172. eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR,
  6173. eRayTracingShaderGroupHandleCaptureReplayKHR = VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR,
  6174. eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV,
  6175. eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR,
  6176. eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR,
  6177. eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV,
  6178. eLibraryKHR = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR,
  6179. eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT,
  6180. eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT,
  6181. eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR,
  6182. eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR};
  6183. VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlagBits value )
  6184. {
  6185. switch ( value )
  6186. {
  6187. case PipelineCreateFlagBits::eDisableOptimization : return "DisableOptimization";
  6188. case PipelineCreateFlagBits::eAllowDerivatives : return "AllowDerivatives";
  6189. case PipelineCreateFlagBits::eDerivative : return "Derivative";
  6190. case PipelineCreateFlagBits::eViewIndexFromDeviceIndex : return "ViewIndexFromDeviceIndex";
  6191. case PipelineCreateFlagBits::eDispatchBase : return "DispatchBase";
  6192. case PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR : return "RayTracingNoNullAnyHitShadersKHR";
  6193. case PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR : return "RayTracingNoNullClosestHitShadersKHR";
  6194. case PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR : return "RayTracingNoNullMissShadersKHR";
  6195. case PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR : return "RayTracingNoNullIntersectionShadersKHR";
  6196. case PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR : return "RayTracingSkipTrianglesKHR";
  6197. case PipelineCreateFlagBits::eRayTracingSkipAabbsKHR : return "RayTracingSkipAabbsKHR";
  6198. case PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR : return "RayTracingShaderGroupHandleCaptureReplayKHR";
  6199. case PipelineCreateFlagBits::eDeferCompileNV : return "DeferCompileNV";
  6200. case PipelineCreateFlagBits::eCaptureStatisticsKHR : return "CaptureStatisticsKHR";
  6201. case PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR : return "CaptureInternalRepresentationsKHR";
  6202. case PipelineCreateFlagBits::eIndirectBindableNV : return "IndirectBindableNV";
  6203. case PipelineCreateFlagBits::eLibraryKHR : return "LibraryKHR";
  6204. case PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT : return "FailOnPipelineCompileRequiredEXT";
  6205. case PipelineCreateFlagBits::eEarlyReturnOnFailureEXT : return "EarlyReturnOnFailureEXT";
  6206. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6207. }
  6208. }
  6209. enum class PipelineCreationFeedbackFlagBitsEXT : VkPipelineCreationFeedbackFlagsEXT
  6210. {
  6211. eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT,
  6212. eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT,
  6213. eBasePipelineAcceleration = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT};
  6214. VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagBitsEXT value )
  6215. {
  6216. switch ( value )
  6217. {
  6218. case PipelineCreationFeedbackFlagBitsEXT::eValid : return "Valid";
  6219. case PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit : return "ApplicationPipelineCacheHit";
  6220. case PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration : return "BasePipelineAcceleration";
  6221. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6222. }
  6223. }
  6224. enum class PipelineExecutableStatisticFormatKHR
  6225. {
  6226. eBool32 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR,
  6227. eInt64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR,
  6228. eUint64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR,
  6229. eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR};
  6230. VULKAN_HPP_INLINE std::string to_string( PipelineExecutableStatisticFormatKHR value )
  6231. {
  6232. switch ( value )
  6233. {
  6234. case PipelineExecutableStatisticFormatKHR::eBool32 : return "Bool32";
  6235. case PipelineExecutableStatisticFormatKHR::eInt64 : return "Int64";
  6236. case PipelineExecutableStatisticFormatKHR::eUint64 : return "Uint64";
  6237. case PipelineExecutableStatisticFormatKHR::eFloat64 : return "Float64";
  6238. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6239. }
  6240. }
  6241. enum class PipelineShaderStageCreateFlagBits : VkPipelineShaderStageCreateFlags
  6242. {
  6243. eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT,
  6244. eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT};
  6245. VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlagBits value )
  6246. {
  6247. switch ( value )
  6248. {
  6249. case PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT : return "AllowVaryingSubgroupSizeEXT";
  6250. case PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT : return "RequireFullSubgroupsEXT";
  6251. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6252. }
  6253. }
  6254. enum class PipelineStageFlagBits : VkPipelineStageFlags
  6255. {
  6256. eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
  6257. eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
  6258. eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
  6259. eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
  6260. eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
  6261. eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
  6262. eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
  6263. eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
  6264. eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
  6265. eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
  6266. eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
  6267. eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
  6268. eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT,
  6269. eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
  6270. eHost = VK_PIPELINE_STAGE_HOST_BIT,
  6271. eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
  6272. eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
  6273. eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
  6274. eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
  6275. eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
  6276. eRayTracingShaderKHR = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR,
  6277. eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
  6278. eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV,
  6279. eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
  6280. eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
  6281. eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV,
  6282. eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
  6283. eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
  6284. eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV};
  6285. VULKAN_HPP_INLINE std::string to_string( PipelineStageFlagBits value )
  6286. {
  6287. switch ( value )
  6288. {
  6289. case PipelineStageFlagBits::eTopOfPipe : return "TopOfPipe";
  6290. case PipelineStageFlagBits::eDrawIndirect : return "DrawIndirect";
  6291. case PipelineStageFlagBits::eVertexInput : return "VertexInput";
  6292. case PipelineStageFlagBits::eVertexShader : return "VertexShader";
  6293. case PipelineStageFlagBits::eTessellationControlShader : return "TessellationControlShader";
  6294. case PipelineStageFlagBits::eTessellationEvaluationShader : return "TessellationEvaluationShader";
  6295. case PipelineStageFlagBits::eGeometryShader : return "GeometryShader";
  6296. case PipelineStageFlagBits::eFragmentShader : return "FragmentShader";
  6297. case PipelineStageFlagBits::eEarlyFragmentTests : return "EarlyFragmentTests";
  6298. case PipelineStageFlagBits::eLateFragmentTests : return "LateFragmentTests";
  6299. case PipelineStageFlagBits::eColorAttachmentOutput : return "ColorAttachmentOutput";
  6300. case PipelineStageFlagBits::eComputeShader : return "ComputeShader";
  6301. case PipelineStageFlagBits::eTransfer : return "Transfer";
  6302. case PipelineStageFlagBits::eBottomOfPipe : return "BottomOfPipe";
  6303. case PipelineStageFlagBits::eHost : return "Host";
  6304. case PipelineStageFlagBits::eAllGraphics : return "AllGraphics";
  6305. case PipelineStageFlagBits::eAllCommands : return "AllCommands";
  6306. case PipelineStageFlagBits::eTransformFeedbackEXT : return "TransformFeedbackEXT";
  6307. case PipelineStageFlagBits::eConditionalRenderingEXT : return "ConditionalRenderingEXT";
  6308. case PipelineStageFlagBits::eAccelerationStructureBuildKHR : return "AccelerationStructureBuildKHR";
  6309. case PipelineStageFlagBits::eRayTracingShaderKHR : return "RayTracingShaderKHR";
  6310. case PipelineStageFlagBits::eShadingRateImageNV : return "ShadingRateImageNV";
  6311. case PipelineStageFlagBits::eTaskShaderNV : return "TaskShaderNV";
  6312. case PipelineStageFlagBits::eMeshShaderNV : return "MeshShaderNV";
  6313. case PipelineStageFlagBits::eFragmentDensityProcessEXT : return "FragmentDensityProcessEXT";
  6314. case PipelineStageFlagBits::eCommandPreprocessNV : return "CommandPreprocessNV";
  6315. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6316. }
  6317. }
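// Illustrative sketch (not part of the generated header): PipelineStageFlagBits values form the
// source/destination stage masks of synchronization commands. Assumes the default `vk` namespace,
// a pre-existing `vk::CommandBuffer cmd` in the recording state, and an already filled
// `vk::ImageMemoryBarrier barrier`.
//
//   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer,        // srcStageMask
//                        vk::PipelineStageFlagBits::eFragmentShader,  // dstStageMask
//                        vk::DependencyFlags(),
//                        nullptr, nullptr, barrier );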
  6318. enum class PointClippingBehavior
  6319. {
  6320. eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
  6321. eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY};
  6322. using PointClippingBehaviorKHR = PointClippingBehavior;
  6323. VULKAN_HPP_INLINE std::string to_string( PointClippingBehavior value )
  6324. {
  6325. switch ( value )
  6326. {
  6327. case PointClippingBehavior::eAllClipPlanes : return "AllClipPlanes";
  6328. case PointClippingBehavior::eUserClipPlanesOnly : return "UserClipPlanesOnly";
  6329. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6330. }
  6331. }
  6332. enum class PolygonMode
  6333. {
  6334. eFill = VK_POLYGON_MODE_FILL,
  6335. eLine = VK_POLYGON_MODE_LINE,
  6336. ePoint = VK_POLYGON_MODE_POINT,
  6337. eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV};
  6338. VULKAN_HPP_INLINE std::string to_string( PolygonMode value )
  6339. {
  6340. switch ( value )
  6341. {
  6342. case PolygonMode::eFill : return "Fill";
  6343. case PolygonMode::eLine : return "Line";
  6344. case PolygonMode::ePoint : return "Point";
  6345. case PolygonMode::eFillRectangleNV : return "FillRectangleNV";
  6346. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6347. }
  6348. }
  6349. enum class PresentModeKHR
  6350. {
  6351. eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
  6352. eMailbox = VK_PRESENT_MODE_MAILBOX_KHR,
  6353. eFifo = VK_PRESENT_MODE_FIFO_KHR,
  6354. eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR,
  6355. eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR,
  6356. eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR};
  6357. VULKAN_HPP_INLINE std::string to_string( PresentModeKHR value )
  6358. {
  6359. switch ( value )
  6360. {
  6361. case PresentModeKHR::eImmediate : return "Immediate";
  6362. case PresentModeKHR::eMailbox : return "Mailbox";
  6363. case PresentModeKHR::eFifo : return "Fifo";
  6364. case PresentModeKHR::eFifoRelaxed : return "FifoRelaxed";
  6365. case PresentModeKHR::eSharedDemandRefresh : return "SharedDemandRefresh";
  6366. case PresentModeKHR::eSharedContinuousRefresh : return "SharedContinuousRefresh";
  6367. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6368. }
  6369. }
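// Illustrative sketch (not part of the generated header): the present modes reported for a surface
// are typically scanned for eMailbox, with eFifo as the fallback that is guaranteed to be
// supported. Assumes the default `vk` namespace, <algorithm> included, and pre-existing
// `physicalDevice` and `surface` handles.
//
//   std::vector<vk::PresentModeKHR> modes = physicalDevice.getSurfacePresentModesKHR( surface );
//   vk::PresentModeKHR presentMode = vk::PresentModeKHR::eFifo;  // always available
//   if ( std::find( modes.begin(), modes.end(), vk::PresentModeKHR::eMailbox ) != modes.end() )
//   {
//     presentMode = vk::PresentModeKHR::eMailbox;
//   }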
  6370. enum class PrimitiveTopology
  6371. {
  6372. ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
  6373. eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
  6374. eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
  6375. eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
  6376. eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
  6377. eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN,
  6378. eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
  6379. eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
  6380. eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
  6381. eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY,
  6382. ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST};
  6383. VULKAN_HPP_INLINE std::string to_string( PrimitiveTopology value )
  6384. {
  6385. switch ( value )
  6386. {
  6387. case PrimitiveTopology::ePointList : return "PointList";
  6388. case PrimitiveTopology::eLineList : return "LineList";
  6389. case PrimitiveTopology::eLineStrip : return "LineStrip";
  6390. case PrimitiveTopology::eTriangleList : return "TriangleList";
  6391. case PrimitiveTopology::eTriangleStrip : return "TriangleStrip";
  6392. case PrimitiveTopology::eTriangleFan : return "TriangleFan";
  6393. case PrimitiveTopology::eLineListWithAdjacency : return "LineListWithAdjacency";
  6394. case PrimitiveTopology::eLineStripWithAdjacency : return "LineStripWithAdjacency";
  6395. case PrimitiveTopology::eTriangleListWithAdjacency : return "TriangleListWithAdjacency";
  6396. case PrimitiveTopology::eTriangleStripWithAdjacency : return "TriangleStripWithAdjacency";
  6397. case PrimitiveTopology::ePatchList : return "PatchList";
  6398. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6399. }
  6400. }
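// Illustrative sketch (not part of the generated header): PrimitiveTopology feeds the input
// assembly state of a graphics pipeline. Assumes the default `vk` namespace.
//
//   vk::PipelineInputAssemblyStateCreateInfo inputAssembly(
//     vk::PipelineInputAssemblyStateCreateFlags(),
//     vk::PrimitiveTopology::eTriangleList,
//     VK_FALSE );  // primitiveRestartEnable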
  6401. enum class PrivateDataSlotCreateFlagBitsEXT : VkPrivateDataSlotCreateFlagsEXT
  6402. {};
  6403. VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagBitsEXT )
  6404. {
  6405. return "(void)";
  6406. }
  6407. enum class QueryControlFlagBits : VkQueryControlFlags
  6408. {
  6409. ePrecise = VK_QUERY_CONTROL_PRECISE_BIT};
  6410. VULKAN_HPP_INLINE std::string to_string( QueryControlFlagBits value )
  6411. {
  6412. switch ( value )
  6413. {
  6414. case QueryControlFlagBits::ePrecise : return "Precise";
  6415. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6416. }
  6417. }
  6418. enum class QueryPipelineStatisticFlagBits : VkQueryPipelineStatisticFlags
  6419. {
  6420. eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
  6421. eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,
  6422. eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT,
  6423. eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT,
  6424. eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT,
  6425. eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT,
  6426. eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT,
  6427. eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT,
  6428. eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT,
  6429. eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT,
  6430. eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT};
  6431. VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlagBits value )
  6432. {
  6433. switch ( value )
  6434. {
  6435. case QueryPipelineStatisticFlagBits::eInputAssemblyVertices : return "InputAssemblyVertices";
  6436. case QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives : return "InputAssemblyPrimitives";
  6437. case QueryPipelineStatisticFlagBits::eVertexShaderInvocations : return "VertexShaderInvocations";
  6438. case QueryPipelineStatisticFlagBits::eGeometryShaderInvocations : return "GeometryShaderInvocations";
  6439. case QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives : return "GeometryShaderPrimitives";
  6440. case QueryPipelineStatisticFlagBits::eClippingInvocations : return "ClippingInvocations";
  6441. case QueryPipelineStatisticFlagBits::eClippingPrimitives : return "ClippingPrimitives";
  6442. case QueryPipelineStatisticFlagBits::eFragmentShaderInvocations : return "FragmentShaderInvocations";
  6443. case QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches : return "TessellationControlShaderPatches";
  6444. case QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations : return "TessellationEvaluationShaderInvocations";
  6445. case QueryPipelineStatisticFlagBits::eComputeShaderInvocations : return "ComputeShaderInvocations";
  6446. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6447. }
  6448. }
  6449. enum class QueryPoolCreateFlagBits
  6450. {};
  6451. VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlagBits )
  6452. {
  6453. return "(void)";
  6454. }
  6455. enum class QueryPoolSamplingModeINTEL
  6456. {
  6457. eManual = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL};
  6458. VULKAN_HPP_INLINE std::string to_string( QueryPoolSamplingModeINTEL value )
  6459. {
  6460. switch ( value )
  6461. {
  6462. case QueryPoolSamplingModeINTEL::eManual : return "Manual";
  6463. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6464. }
  6465. }
  6466. enum class QueryResultFlagBits : VkQueryResultFlags
  6467. {
  6468. e64 = VK_QUERY_RESULT_64_BIT,
  6469. eWait = VK_QUERY_RESULT_WAIT_BIT,
  6470. eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
  6471. ePartial = VK_QUERY_RESULT_PARTIAL_BIT};
  6472. VULKAN_HPP_INLINE std::string to_string( QueryResultFlagBits value )
  6473. {
  6474. switch ( value )
  6475. {
  6476. case QueryResultFlagBits::e64 : return "64";
  6477. case QueryResultFlagBits::eWait : return "Wait";
  6478. case QueryResultFlagBits::eWithAvailability : return "WithAvailability";
  6479. case QueryResultFlagBits::ePartial : return "Partial";
  6480. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6481. }
  6482. }
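// Illustrative sketch (not part of the generated header): QueryResultFlagBits controls how query
// data is returned; e64 selects 64-bit results and eWait blocks until they are available. Assumes
// the default `vk` namespace, pre-existing `device` and `queryPool` handles, and the raw-pointer
// overload of getQueryPoolResults.
//
//   std::array<uint64_t, 2> timestamps{};
//   auto result = device.getQueryPoolResults( queryPool, 0, 2,
//                                             timestamps.size() * sizeof( uint64_t ),
//                                             timestamps.data(), sizeof( uint64_t ),
//                                             vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );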
  6483. enum class QueryType
  6484. {
  6485. eOcclusion = VK_QUERY_TYPE_OCCLUSION,
  6486. ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS,
  6487. eTimestamp = VK_QUERY_TYPE_TIMESTAMP,
  6488. eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT,
  6489. ePerformanceQueryKHR = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR,
  6490. eAccelerationStructureCompactedSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR,
  6491. eAccelerationStructureSerializationSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR,
  6492. eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV,
  6493. ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL};
  6494. VULKAN_HPP_INLINE std::string to_string( QueryType value )
  6495. {
  6496. switch ( value )
  6497. {
  6498. case QueryType::eOcclusion : return "Occlusion";
  6499. case QueryType::ePipelineStatistics : return "PipelineStatistics";
  6500. case QueryType::eTimestamp : return "Timestamp";
  6501. case QueryType::eTransformFeedbackStreamEXT : return "TransformFeedbackStreamEXT";
  6502. case QueryType::ePerformanceQueryKHR : return "PerformanceQueryKHR";
  6503. case QueryType::eAccelerationStructureCompactedSizeKHR : return "AccelerationStructureCompactedSizeKHR";
  6504. case QueryType::eAccelerationStructureSerializationSizeKHR : return "AccelerationStructureSerializationSizeKHR";
  6505. case QueryType::eAccelerationStructureCompactedSizeNV : return "AccelerationStructureCompactedSizeNV";
  6506. case QueryType::ePerformanceQueryINTEL : return "PerformanceQueryINTEL";
  6507. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6508. }
  6509. }
  6510. enum class QueueFlagBits : VkQueueFlags
  6511. {
  6512. eGraphics = VK_QUEUE_GRAPHICS_BIT,
  6513. eCompute = VK_QUEUE_COMPUTE_BIT,
  6514. eTransfer = VK_QUEUE_TRANSFER_BIT,
  6515. eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT,
  6516. eProtected = VK_QUEUE_PROTECTED_BIT};
  6517. VULKAN_HPP_INLINE std::string to_string( QueueFlagBits value )
  6518. {
  6519. switch ( value )
  6520. {
  6521. case QueueFlagBits::eGraphics : return "Graphics";
  6522. case QueueFlagBits::eCompute : return "Compute";
  6523. case QueueFlagBits::eTransfer : return "Transfer";
  6524. case QueueFlagBits::eSparseBinding : return "SparseBinding";
  6525. case QueueFlagBits::eProtected : return "Protected";
  6526. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6527. }
  6528. }
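// Illustrative sketch (not part of the generated header): QueueFlagBits is matched against the
// queueFlags of each queue family when picking a family index for device creation. Assumes the
// default `vk` namespace and a pre-existing `vk::PhysicalDevice physicalDevice`.
//
//   std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
//   uint32_t graphicsFamily = uint32_t( families.size() );  // "not found" sentinel
//   for ( uint32_t i = 0; i < families.size(); ++i )
//   {
//     if ( families[i].queueFlags & vk::QueueFlagBits::eGraphics )
//     {
//       graphicsFamily = i;
//       break;
//     }
//   }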
  6529. enum class QueueGlobalPriorityEXT
  6530. {
  6531. eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT,
  6532. eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT,
  6533. eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT,
  6534. eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT};
  6535. VULKAN_HPP_INLINE std::string to_string( QueueGlobalPriorityEXT value )
  6536. {
  6537. switch ( value )
  6538. {
  6539. case QueueGlobalPriorityEXT::eLow : return "Low";
  6540. case QueueGlobalPriorityEXT::eMedium : return "Medium";
  6541. case QueueGlobalPriorityEXT::eHigh : return "High";
  6542. case QueueGlobalPriorityEXT::eRealtime : return "Realtime";
  6543. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6544. }
  6545. }
  6546. enum class RasterizationOrderAMD
  6547. {
  6548. eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
  6549. eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD};
  6550. VULKAN_HPP_INLINE std::string to_string( RasterizationOrderAMD value )
  6551. {
  6552. switch ( value )
  6553. {
  6554. case RasterizationOrderAMD::eStrict : return "Strict";
  6555. case RasterizationOrderAMD::eRelaxed : return "Relaxed";
  6556. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6557. }
  6558. }
  6559. enum class RayTracingShaderGroupTypeKHR
  6560. {
  6561. eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR,
  6562. eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR,
  6563. eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR};
  6564. using RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR;
  6565. VULKAN_HPP_INLINE std::string to_string( RayTracingShaderGroupTypeKHR value )
  6566. {
  6567. switch ( value )
  6568. {
  6569. case RayTracingShaderGroupTypeKHR::eGeneral : return "General";
  6570. case RayTracingShaderGroupTypeKHR::eTrianglesHitGroup : return "TrianglesHitGroup";
  6571. case RayTracingShaderGroupTypeKHR::eProceduralHitGroup : return "ProceduralHitGroup";
  6572. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6573. }
  6574. }
  6575. enum class RenderPassCreateFlagBits : VkRenderPassCreateFlags
  6576. {
  6577. eTransformQCOM = VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM};
  6578. VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlagBits value )
  6579. {
  6580. switch ( value )
  6581. {
  6582. case RenderPassCreateFlagBits::eTransformQCOM : return "TransformQCOM";
  6583. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6584. }
  6585. }
  6586. enum class ResolveModeFlagBits : VkResolveModeFlags
  6587. {
  6588. eNone = VK_RESOLVE_MODE_NONE,
  6589. eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT,
  6590. eAverage = VK_RESOLVE_MODE_AVERAGE_BIT,
  6591. eMin = VK_RESOLVE_MODE_MIN_BIT,
  6592. eMax = VK_RESOLVE_MODE_MAX_BIT};
  6593. using ResolveModeFlagBitsKHR = ResolveModeFlagBits;
  6594. VULKAN_HPP_INLINE std::string to_string( ResolveModeFlagBits value )
  6595. {
  6596. switch ( value )
  6597. {
  6598. case ResolveModeFlagBits::eNone : return "None";
  6599. case ResolveModeFlagBits::eSampleZero : return "SampleZero";
  6600. case ResolveModeFlagBits::eAverage : return "Average";
  6601. case ResolveModeFlagBits::eMin : return "Min";
  6602. case ResolveModeFlagBits::eMax : return "Max";
  6603. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6604. }
  6605. }
  6606. enum class Result
  6607. {
  6608. eSuccess = VK_SUCCESS,
  6609. eNotReady = VK_NOT_READY,
  6610. eTimeout = VK_TIMEOUT,
  6611. eEventSet = VK_EVENT_SET,
  6612. eEventReset = VK_EVENT_RESET,
  6613. eIncomplete = VK_INCOMPLETE,
  6614. eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY,
  6615. eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY,
  6616. eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED,
  6617. eErrorDeviceLost = VK_ERROR_DEVICE_LOST,
  6618. eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED,
  6619. eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT,
  6620. eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT,
  6621. eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT,
  6622. eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER,
  6623. eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS,
  6624. eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED,
  6625. eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL,
  6626. eErrorUnknown = VK_ERROR_UNKNOWN,
  6627. eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY,
  6628. eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE,
  6629. eErrorFragmentation = VK_ERROR_FRAGMENTATION,
  6630. eErrorInvalidOpaqueCaptureAddress = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS,
  6631. eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR,
  6632. eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR,
  6633. eSuboptimalKHR = VK_SUBOPTIMAL_KHR,
  6634. eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR,
  6635. eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR,
  6636. eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT,
  6637. eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV,
  6638. eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT,
  6639. eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT,
  6640. #ifdef VK_USE_PLATFORM_WIN32_KHR
  6641. eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT,
  6642. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  6643. eThreadIdleKHR = VK_THREAD_IDLE_KHR,
  6644. eThreadDoneKHR = VK_THREAD_DONE_KHR,
  6645. eOperationDeferredKHR = VK_OPERATION_DEFERRED_KHR,
  6646. eOperationNotDeferredKHR = VK_OPERATION_NOT_DEFERRED_KHR,
  6647. ePipelineCompileRequiredEXT = VK_PIPELINE_COMPILE_REQUIRED_EXT,
  6648. eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT,
  6649. eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT,
  6650. eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR,
  6651. eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR,
  6652. eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR,
  6653. eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT};
  6654. VULKAN_HPP_INLINE std::string to_string( Result value )
  6655. {
  6656. switch ( value )
  6657. {
  6658. case Result::eSuccess : return "Success";
  6659. case Result::eNotReady : return "NotReady";
  6660. case Result::eTimeout : return "Timeout";
  6661. case Result::eEventSet : return "EventSet";
  6662. case Result::eEventReset : return "EventReset";
  6663. case Result::eIncomplete : return "Incomplete";
  6664. case Result::eErrorOutOfHostMemory : return "ErrorOutOfHostMemory";
  6665. case Result::eErrorOutOfDeviceMemory : return "ErrorOutOfDeviceMemory";
  6666. case Result::eErrorInitializationFailed : return "ErrorInitializationFailed";
  6667. case Result::eErrorDeviceLost : return "ErrorDeviceLost";
  6668. case Result::eErrorMemoryMapFailed : return "ErrorMemoryMapFailed";
  6669. case Result::eErrorLayerNotPresent : return "ErrorLayerNotPresent";
  6670. case Result::eErrorExtensionNotPresent : return "ErrorExtensionNotPresent";
  6671. case Result::eErrorFeatureNotPresent : return "ErrorFeatureNotPresent";
  6672. case Result::eErrorIncompatibleDriver : return "ErrorIncompatibleDriver";
  6673. case Result::eErrorTooManyObjects : return "ErrorTooManyObjects";
  6674. case Result::eErrorFormatNotSupported : return "ErrorFormatNotSupported";
  6675. case Result::eErrorFragmentedPool : return "ErrorFragmentedPool";
  6676. case Result::eErrorUnknown : return "ErrorUnknown";
  6677. case Result::eErrorOutOfPoolMemory : return "ErrorOutOfPoolMemory";
  6678. case Result::eErrorInvalidExternalHandle : return "ErrorInvalidExternalHandle";
  6679. case Result::eErrorFragmentation : return "ErrorFragmentation";
  6680. case Result::eErrorInvalidOpaqueCaptureAddress : return "ErrorInvalidOpaqueCaptureAddress";
  6681. case Result::eErrorSurfaceLostKHR : return "ErrorSurfaceLostKHR";
  6682. case Result::eErrorNativeWindowInUseKHR : return "ErrorNativeWindowInUseKHR";
  6683. case Result::eSuboptimalKHR : return "SuboptimalKHR";
  6684. case Result::eErrorOutOfDateKHR : return "ErrorOutOfDateKHR";
  6685. case Result::eErrorIncompatibleDisplayKHR : return "ErrorIncompatibleDisplayKHR";
  6686. case Result::eErrorValidationFailedEXT : return "ErrorValidationFailedEXT";
  6687. case Result::eErrorInvalidShaderNV : return "ErrorInvalidShaderNV";
  6688. case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT : return "ErrorInvalidDrmFormatModifierPlaneLayoutEXT";
  6689. case Result::eErrorNotPermittedEXT : return "ErrorNotPermittedEXT";
  6690. #ifdef VK_USE_PLATFORM_WIN32_KHR
  6691. case Result::eErrorFullScreenExclusiveModeLostEXT : return "ErrorFullScreenExclusiveModeLostEXT";
  6692. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  6693. case Result::eThreadIdleKHR : return "ThreadIdleKHR";
  6694. case Result::eThreadDoneKHR : return "ThreadDoneKHR";
  6695. case Result::eOperationDeferredKHR : return "OperationDeferredKHR";
  6696. case Result::eOperationNotDeferredKHR : return "OperationNotDeferredKHR";
  6697. case Result::ePipelineCompileRequiredEXT : return "PipelineCompileRequiredEXT";
  6698. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6699. }
  6700. }
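// Illustrative sketch (not part of the generated header): when exceptions are disabled
// (VULKAN_HPP_NO_EXCEPTIONS), calls hand back a Result (or a ResultValue pair) that can be
// checked and logged with to_string. Assumes the default `vk` namespace, <iostream> included,
// and a pre-existing `vk::Device device`.
//
//   vk::Result result = device.waitIdle();
//   if ( result != vk::Result::eSuccess )
//   {
//     std::cerr << "waitIdle failed: " << vk::to_string( result ) << "\n";
//   }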
  6701. enum class SampleCountFlagBits : VkSampleCountFlags
  6702. {
  6703. e1 = VK_SAMPLE_COUNT_1_BIT,
  6704. e2 = VK_SAMPLE_COUNT_2_BIT,
  6705. e4 = VK_SAMPLE_COUNT_4_BIT,
  6706. e8 = VK_SAMPLE_COUNT_8_BIT,
  6707. e16 = VK_SAMPLE_COUNT_16_BIT,
  6708. e32 = VK_SAMPLE_COUNT_32_BIT,
  6709. e64 = VK_SAMPLE_COUNT_64_BIT};
  6710. VULKAN_HPP_INLINE std::string to_string( SampleCountFlagBits value )
  6711. {
  6712. switch ( value )
  6713. {
  6714. case SampleCountFlagBits::e1 : return "1";
  6715. case SampleCountFlagBits::e2 : return "2";
  6716. case SampleCountFlagBits::e4 : return "4";
  6717. case SampleCountFlagBits::e8 : return "8";
  6718. case SampleCountFlagBits::e16 : return "16";
  6719. case SampleCountFlagBits::e32 : return "32";
  6720. case SampleCountFlagBits::e64 : return "64";
  6721. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6722. }
  6723. }
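// Illustrative sketch (not part of the generated header): the device limits report supported
// sample counts as a SampleCountFlags mask; a common helper walks from e64 down to e1 to pick the
// highest count usable for both color and depth. Assumes the default `vk` namespace and a
// pre-existing `vk::PhysicalDevice physicalDevice`.
//
//   vk::PhysicalDeviceLimits limits = physicalDevice.getProperties().limits;
//   vk::SampleCountFlags supported = limits.framebufferColorSampleCounts & limits.framebufferDepthSampleCounts;
//   vk::SampleCountFlagBits msaa = vk::SampleCountFlagBits::e1;
//   for ( vk::SampleCountFlagBits candidate : { vk::SampleCountFlagBits::e64, vk::SampleCountFlagBits::e32,
//                                               vk::SampleCountFlagBits::e16, vk::SampleCountFlagBits::e8,
//                                               vk::SampleCountFlagBits::e4,  vk::SampleCountFlagBits::e2 } )
//   {
//     if ( supported & candidate ) { msaa = candidate; break; }
//   }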
  6724. enum class SamplerAddressMode
  6725. {
  6726. eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT,
  6727. eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
  6728. eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
  6729. eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
  6730. eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,
  6731. eMirrorClampToEdgeKHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR};
  6732. VULKAN_HPP_INLINE std::string to_string( SamplerAddressMode value )
  6733. {
  6734. switch ( value )
  6735. {
  6736. case SamplerAddressMode::eRepeat : return "Repeat";
  6737. case SamplerAddressMode::eMirroredRepeat : return "MirroredRepeat";
  6738. case SamplerAddressMode::eClampToEdge : return "ClampToEdge";
  6739. case SamplerAddressMode::eClampToBorder : return "ClampToBorder";
  6740. case SamplerAddressMode::eMirrorClampToEdge : return "MirrorClampToEdge";
  6741. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6742. }
  6743. }
  6744. enum class SamplerCreateFlagBits : VkSamplerCreateFlags
  6745. {
  6746. eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT,
  6747. eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT};
  6748. VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlagBits value )
  6749. {
  6750. switch ( value )
  6751. {
  6752. case SamplerCreateFlagBits::eSubsampledEXT : return "SubsampledEXT";
  6753. case SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT : return "SubsampledCoarseReconstructionEXT";
  6754. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6755. }
  6756. }
  6757. enum class SamplerMipmapMode
  6758. {
  6759. eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST,
  6760. eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR};
  6761. VULKAN_HPP_INLINE std::string to_string( SamplerMipmapMode value )
  6762. {
  6763. switch ( value )
  6764. {
  6765. case SamplerMipmapMode::eNearest : return "Nearest";
  6766. case SamplerMipmapMode::eLinear : return "Linear";
  6767. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6768. }
  6769. }
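// Illustrative sketch (not part of the generated header): SamplerAddressMode and SamplerMipmapMode
// both end up in a SamplerCreateInfo, here filled via direct member assignment. Assumes the
// default `vk` namespace and a pre-existing `vk::Device device`.
//
//   vk::SamplerCreateInfo samplerInfo;
//   samplerInfo.magFilter    = vk::Filter::eLinear;
//   samplerInfo.minFilter    = vk::Filter::eLinear;
//   samplerInfo.mipmapMode   = vk::SamplerMipmapMode::eLinear;
//   samplerInfo.addressModeU = vk::SamplerAddressMode::eRepeat;
//   samplerInfo.addressModeV = vk::SamplerAddressMode::eRepeat;
//   samplerInfo.addressModeW = vk::SamplerAddressMode::eClampToEdge;
//   vk::UniqueSampler sampler = device.createSamplerUnique( samplerInfo );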
  6770. enum class SamplerReductionMode
  6771. {
  6772. eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE,
  6773. eMin = VK_SAMPLER_REDUCTION_MODE_MIN,
  6774. eMax = VK_SAMPLER_REDUCTION_MODE_MAX};
  6775. using SamplerReductionModeEXT = SamplerReductionMode;
  6776. VULKAN_HPP_INLINE std::string to_string( SamplerReductionMode value )
  6777. {
  6778. switch ( value )
  6779. {
  6780. case SamplerReductionMode::eWeightedAverage : return "WeightedAverage";
  6781. case SamplerReductionMode::eMin : return "Min";
  6782. case SamplerReductionMode::eMax : return "Max";
  6783. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6784. }
  6785. }
  6786. enum class SamplerYcbcrModelConversion
  6787. {
  6788. eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
  6789. eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY,
  6790. eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
  6791. eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601,
  6792. eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020};
  6793. using SamplerYcbcrModelConversionKHR = SamplerYcbcrModelConversion;
  6794. VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrModelConversion value )
  6795. {
  6796. switch ( value )
  6797. {
  6798. case SamplerYcbcrModelConversion::eRgbIdentity : return "RgbIdentity";
  6799. case SamplerYcbcrModelConversion::eYcbcrIdentity : return "YcbcrIdentity";
  6800. case SamplerYcbcrModelConversion::eYcbcr709 : return "Ycbcr709";
  6801. case SamplerYcbcrModelConversion::eYcbcr601 : return "Ycbcr601";
  6802. case SamplerYcbcrModelConversion::eYcbcr2020 : return "Ycbcr2020";
  6803. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6804. }
  6805. }
  6806. enum class SamplerYcbcrRange
  6807. {
  6808. eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
  6809. eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW};
  6810. using SamplerYcbcrRangeKHR = SamplerYcbcrRange;
  6811. VULKAN_HPP_INLINE std::string to_string( SamplerYcbcrRange value )
  6812. {
  6813. switch ( value )
  6814. {
  6815. case SamplerYcbcrRange::eItuFull : return "ItuFull";
  6816. case SamplerYcbcrRange::eItuNarrow : return "ItuNarrow";
  6817. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6818. }
  6819. }
  6820. enum class ScopeNV
  6821. {
  6822. eDevice = VK_SCOPE_DEVICE_NV,
  6823. eWorkgroup = VK_SCOPE_WORKGROUP_NV,
  6824. eSubgroup = VK_SCOPE_SUBGROUP_NV,
  6825. eQueueFamily = VK_SCOPE_QUEUE_FAMILY_NV};
  6826. VULKAN_HPP_INLINE std::string to_string( ScopeNV value )
  6827. {
  6828. switch ( value )
  6829. {
  6830. case ScopeNV::eDevice : return "Device";
  6831. case ScopeNV::eWorkgroup : return "Workgroup";
  6832. case ScopeNV::eSubgroup : return "Subgroup";
  6833. case ScopeNV::eQueueFamily : return "QueueFamily";
  6834. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6835. }
  6836. }
  6837. enum class SemaphoreImportFlagBits : VkSemaphoreImportFlags
  6838. {
  6839. eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT};
  6840. using SemaphoreImportFlagBitsKHR = SemaphoreImportFlagBits;
  6841. VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlagBits value )
  6842. {
  6843. switch ( value )
  6844. {
  6845. case SemaphoreImportFlagBits::eTemporary : return "Temporary";
  6846. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6847. }
  6848. }
  6849. enum class SemaphoreType
  6850. {
  6851. eBinary = VK_SEMAPHORE_TYPE_BINARY,
  6852. eTimeline = VK_SEMAPHORE_TYPE_TIMELINE};
  6853. using SemaphoreTypeKHR = SemaphoreType;
  6854. VULKAN_HPP_INLINE std::string to_string( SemaphoreType value )
  6855. {
  6856. switch ( value )
  6857. {
  6858. case SemaphoreType::eBinary : return "Binary";
  6859. case SemaphoreType::eTimeline : return "Timeline";
  6860. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6861. }
  6862. }
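// Illustrative sketch (not part of the generated header): creating a timeline semaphore chains a
// SemaphoreTypeCreateInfo into the SemaphoreCreateInfo. Assumes the default `vk` namespace, a
// pre-existing `vk::Device device`, and Vulkan 1.2 (or VK_KHR_timeline_semaphore) support.
//
//   vk::SemaphoreTypeCreateInfo typeInfo( vk::SemaphoreType::eTimeline, 0 /* initialValue */ );
//   vk::SemaphoreCreateInfo createInfo;
//   createInfo.pNext = &typeInfo;
//   vk::UniqueSemaphore timeline = device.createSemaphoreUnique( createInfo );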
  6863. enum class SemaphoreWaitFlagBits : VkSemaphoreWaitFlags
  6864. {
  6865. eAny = VK_SEMAPHORE_WAIT_ANY_BIT};
  6866. using SemaphoreWaitFlagBitsKHR = SemaphoreWaitFlagBits;
  6867. VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlagBits value )
  6868. {
  6869. switch ( value )
  6870. {
  6871. case SemaphoreWaitFlagBits::eAny : return "Any";
  6872. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6873. }
  6874. }
  6875. enum class ShaderCorePropertiesFlagBitsAMD : VkShaderCorePropertiesFlagsAMD
  6876. {};
  6877. VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagBitsAMD )
  6878. {
  6879. return "(void)";
  6880. }
  6881. enum class ShaderFloatControlsIndependence
  6882. {
  6883. e32BitOnly = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY,
  6884. eAll = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL,
  6885. eNone = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE};
  6886. using ShaderFloatControlsIndependenceKHR = ShaderFloatControlsIndependence;
  6887. VULKAN_HPP_INLINE std::string to_string( ShaderFloatControlsIndependence value )
  6888. {
  6889. switch ( value )
  6890. {
  6891. case ShaderFloatControlsIndependence::e32BitOnly : return "32BitOnly";
  6892. case ShaderFloatControlsIndependence::eAll : return "All";
  6893. case ShaderFloatControlsIndependence::eNone : return "None";
  6894. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6895. }
  6896. }
  6897. enum class ShaderGroupShaderKHR
  6898. {
  6899. eGeneral = VK_SHADER_GROUP_SHADER_GENERAL_KHR,
  6900. eClosestHit = VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR,
  6901. eAnyHit = VK_SHADER_GROUP_SHADER_ANY_HIT_KHR,
  6902. eIntersection = VK_SHADER_GROUP_SHADER_INTERSECTION_KHR};
  6903. VULKAN_HPP_INLINE std::string to_string( ShaderGroupShaderKHR value )
  6904. {
  6905. switch ( value )
  6906. {
  6907. case ShaderGroupShaderKHR::eGeneral : return "General";
  6908. case ShaderGroupShaderKHR::eClosestHit : return "ClosestHit";
  6909. case ShaderGroupShaderKHR::eAnyHit : return "AnyHit";
  6910. case ShaderGroupShaderKHR::eIntersection : return "Intersection";
  6911. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6912. }
  6913. }
  6914. enum class ShaderInfoTypeAMD
  6915. {
  6916. eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD,
  6917. eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD,
  6918. eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD};
  6919. VULKAN_HPP_INLINE std::string to_string( ShaderInfoTypeAMD value )
  6920. {
  6921. switch ( value )
  6922. {
  6923. case ShaderInfoTypeAMD::eStatistics : return "Statistics";
  6924. case ShaderInfoTypeAMD::eBinary : return "Binary";
  6925. case ShaderInfoTypeAMD::eDisassembly : return "Disassembly";
  6926. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6927. }
  6928. }
  6929. enum class ShaderModuleCreateFlagBits : VkShaderModuleCreateFlags
  6930. {};
  6931. VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlagBits )
  6932. {
  6933. return "(void)";
  6934. }
  6935. enum class ShaderStageFlagBits : VkShaderStageFlags
  6936. {
  6937. eVertex = VK_SHADER_STAGE_VERTEX_BIT,
  6938. eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
  6939. eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
  6940. eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT,
  6941. eFragment = VK_SHADER_STAGE_FRAGMENT_BIT,
  6942. eCompute = VK_SHADER_STAGE_COMPUTE_BIT,
  6943. eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS,
  6944. eAll = VK_SHADER_STAGE_ALL,
  6945. eRaygenKHR = VK_SHADER_STAGE_RAYGEN_BIT_KHR,
  6946. eAnyHitKHR = VK_SHADER_STAGE_ANY_HIT_BIT_KHR,
  6947. eClosestHitKHR = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR,
  6948. eMissKHR = VK_SHADER_STAGE_MISS_BIT_KHR,
  6949. eIntersectionKHR = VK_SHADER_STAGE_INTERSECTION_BIT_KHR,
  6950. eCallableKHR = VK_SHADER_STAGE_CALLABLE_BIT_KHR,
  6951. eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV,
  6952. eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV,
  6953. eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV,
  6954. eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV,
  6955. eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV,
  6956. eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV,
  6957. eMissNV = VK_SHADER_STAGE_MISS_BIT_NV,
  6958. eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV};
  6959. VULKAN_HPP_INLINE std::string to_string( ShaderStageFlagBits value )
  6960. {
  6961. switch ( value )
  6962. {
  6963. case ShaderStageFlagBits::eVertex : return "Vertex";
  6964. case ShaderStageFlagBits::eTessellationControl : return "TessellationControl";
  6965. case ShaderStageFlagBits::eTessellationEvaluation : return "TessellationEvaluation";
  6966. case ShaderStageFlagBits::eGeometry : return "Geometry";
  6967. case ShaderStageFlagBits::eFragment : return "Fragment";
  6968. case ShaderStageFlagBits::eCompute : return "Compute";
  6969. case ShaderStageFlagBits::eAllGraphics : return "AllGraphics";
  6970. case ShaderStageFlagBits::eAll : return "All";
  6971. case ShaderStageFlagBits::eRaygenKHR : return "RaygenKHR";
  6972. case ShaderStageFlagBits::eAnyHitKHR : return "AnyHitKHR";
  6973. case ShaderStageFlagBits::eClosestHitKHR : return "ClosestHitKHR";
  6974. case ShaderStageFlagBits::eMissKHR : return "MissKHR";
  6975. case ShaderStageFlagBits::eIntersectionKHR : return "IntersectionKHR";
  6976. case ShaderStageFlagBits::eCallableKHR : return "CallableKHR";
  6977. case ShaderStageFlagBits::eTaskNV : return "TaskNV";
  6978. case ShaderStageFlagBits::eMeshNV : return "MeshNV";
  6979. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  6980. }
  6981. }
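// Note: the NV ray tracing enumerators (eRaygenNV, eAnyHitNV, ...) alias the KHR values,
// so they cannot appear as additional case labels in the switch above. Individual
// ShaderStageFlagBits are normally OR-ed into a ShaderStageFlags bitmask; a minimal,
// hypothetical sketch assuming the default vk namespace:
//
//   vk::ShaderStageFlags stages = vk::ShaderStageFlagBits::eVertex
//                               | vk::ShaderStageFlagBits::eFragment;
//   if ( stages & vk::ShaderStageFlagBits::eFragment )
//   {
//     // the fragment bit is set; to_string( vk::ShaderStageFlagBits::eFragment ) yields "Fragment"
//   }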
enum class ShadingRatePaletteEntryNV
{
eNoInvocations = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV,
e16InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV,
e8InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV,
e4InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV,
e2InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV,
e1InvocationPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV,
e1InvocationPer2X1Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV,
e1InvocationPer1X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV,
e1InvocationPer2X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV,
e1InvocationPer4X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV,
e1InvocationPer2X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV,
e1InvocationPer4X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV};
VULKAN_HPP_INLINE std::string to_string( ShadingRatePaletteEntryNV value )
{
switch ( value )
{
case ShadingRatePaletteEntryNV::eNoInvocations : return "NoInvocations";
case ShadingRatePaletteEntryNV::e16InvocationsPerPixel : return "16InvocationsPerPixel";
case ShadingRatePaletteEntryNV::e8InvocationsPerPixel : return "8InvocationsPerPixel";
case ShadingRatePaletteEntryNV::e4InvocationsPerPixel : return "4InvocationsPerPixel";
case ShadingRatePaletteEntryNV::e2InvocationsPerPixel : return "2InvocationsPerPixel";
case ShadingRatePaletteEntryNV::e1InvocationPerPixel : return "1InvocationPerPixel";
case ShadingRatePaletteEntryNV::e1InvocationPer2X1Pixels : return "1InvocationPer2X1Pixels";
case ShadingRatePaletteEntryNV::e1InvocationPer1X2Pixels : return "1InvocationPer1X2Pixels";
case ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels : return "1InvocationPer2X2Pixels";
case ShadingRatePaletteEntryNV::e1InvocationPer4X2Pixels : return "1InvocationPer4X2Pixels";
case ShadingRatePaletteEntryNV::e1InvocationPer2X4Pixels : return "1InvocationPer2X4Pixels";
case ShadingRatePaletteEntryNV::e1InvocationPer4X4Pixels : return "1InvocationPer4X4Pixels";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
enum class SharingMode
{
eExclusive = VK_SHARING_MODE_EXCLUSIVE,
eConcurrent = VK_SHARING_MODE_CONCURRENT};
VULKAN_HPP_INLINE std::string to_string( SharingMode value )
{
switch ( value )
{
case SharingMode::eExclusive : return "Exclusive";
case SharingMode::eConcurrent : return "Concurrent";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
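// SharingMode is the field BufferCreateInfo, ImageCreateInfo and SwapchainCreateInfoKHR use
// to declare queue-family ownership. A minimal, hypothetical sketch assuming the default
// vk namespace:
//
//   vk::BufferCreateInfo bufferInfo;
//   bufferInfo.size        = 1024;
//   bufferInfo.usage       = vk::BufferUsageFlagBits::eUniformBuffer;
//   bufferInfo.sharingMode = vk::SharingMode::eExclusive; // owned by one queue family at a time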
enum class SparseImageFormatFlagBits : VkSparseImageFormatFlags
{
eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT};
VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlagBits value )
{
switch ( value )
{
case SparseImageFormatFlagBits::eSingleMiptail : return "SingleMiptail";
case SparseImageFormatFlagBits::eAlignedMipSize : return "AlignedMipSize";
case SparseImageFormatFlagBits::eNonstandardBlockSize : return "NonstandardBlockSize";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
enum class SparseMemoryBindFlagBits : VkSparseMemoryBindFlags
{
eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT};
VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlagBits value )
{
switch ( value )
{
case SparseMemoryBindFlagBits::eMetadata : return "Metadata";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
enum class StencilFaceFlagBits : VkStencilFaceFlags
{
eFront = VK_STENCIL_FACE_FRONT_BIT,
eBack = VK_STENCIL_FACE_BACK_BIT,
eFrontAndBack = VK_STENCIL_FACE_FRONT_AND_BACK,
eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK};
VULKAN_HPP_INLINE std::string to_string( StencilFaceFlagBits value )
{
switch ( value )
{
case StencilFaceFlagBits::eFront : return "Front";
case StencilFaceFlagBits::eBack : return "Back";
case StencilFaceFlagBits::eFrontAndBack : return "FrontAndBack";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
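// eVkStencilFrontAndBack aliases VK_STENCIL_FRONT_AND_BACK, a deprecated synonym for
// VK_STENCIL_FACE_FRONT_AND_BACK; since it shares its value with eFrontAndBack it is
// deliberately absent from the switch above.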
enum class StencilOp
{
eKeep = VK_STENCIL_OP_KEEP,
eZero = VK_STENCIL_OP_ZERO,
eReplace = VK_STENCIL_OP_REPLACE,
eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP,
eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP,
eInvert = VK_STENCIL_OP_INVERT,
eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP,
eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP};
VULKAN_HPP_INLINE std::string to_string( StencilOp value )
{
switch ( value )
{
case StencilOp::eKeep : return "Keep";
case StencilOp::eZero : return "Zero";
case StencilOp::eReplace : return "Replace";
case StencilOp::eIncrementAndClamp : return "IncrementAndClamp";
case StencilOp::eDecrementAndClamp : return "DecrementAndClamp";
case StencilOp::eInvert : return "Invert";
case StencilOp::eIncrementAndWrap : return "IncrementAndWrap";
case StencilOp::eDecrementAndWrap : return "DecrementAndWrap";
default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
}
}
enum class StructureType
{
eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO,
eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO,
eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO,
eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO,
ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO,
eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES,
eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES,
eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO,
eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO,
eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO,
eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO,
eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO,
eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO,
eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO,
ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES,
eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO,
eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
eMemoryRequirements2 = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2,
ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,
ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2,
ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2,
ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES,
eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
ePipelineTessellationDomainOriginStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO,
eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO,
ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES,
ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO,
ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES,
ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES,
eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2,
eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO,
eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES,
ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO,
eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES,
ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES,
eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO,
eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES,
ePhysicalDeviceVulkan11Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES,
ePhysicalDeviceVulkan11Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES,
ePhysicalDeviceVulkan12Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES,
ePhysicalDeviceVulkan12Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES,
eImageFormatListCreateInfo = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO,
eAttachmentDescription2 = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2,
eAttachmentReference2 = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2,
eSubpassDescription2 = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2,
eSubpassDependency2 = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
eRenderPassCreateInfo2 = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2,
eSubpassBeginInfo = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO,
eSubpassEndInfo = VK_STRUCTURE_TYPE_SUBPASS_END_INFO,
ePhysicalDevice8BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES,
ePhysicalDeviceDriverProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES,
ePhysicalDeviceShaderAtomicInt64Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES,
ePhysicalDeviceShaderFloat16Int8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES,
ePhysicalDeviceFloatControlsProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES,
eDescriptorSetLayoutBindingFlagsCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
ePhysicalDeviceDescriptorIndexingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES,
ePhysicalDeviceDescriptorIndexingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES,
eDescriptorSetVariableDescriptorCountAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO,
eDescriptorSetVariableDescriptorCountLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT,
ePhysicalDeviceDepthStencilResolveProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES,
eSubpassDescriptionDepthStencilResolve = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE,
ePhysicalDeviceScalarBlockLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES,
eImageStencilUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO,
ePhysicalDeviceSamplerFilterMinmaxProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES,
eSamplerReductionModeCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO,
ePhysicalDeviceVulkanMemoryModelFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES,
ePhysicalDeviceImagelessFramebufferFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES,
eFramebufferAttachmentsCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO,
eFramebufferAttachmentImageInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO,
eRenderPassAttachmentBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO,
ePhysicalDeviceUniformBufferStandardLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES,
ePhysicalDeviceShaderSubgroupExtendedTypesFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES,
ePhysicalDeviceSeparateDepthStencilLayoutsFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES,
eAttachmentReferenceStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT,
eAttachmentDescriptionStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT,
ePhysicalDeviceHostQueryResetFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES,
ePhysicalDeviceTimelineSemaphoreFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES,
ePhysicalDeviceTimelineSemaphoreProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES,
eSemaphoreTypeCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
eTimelineSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO,
eSemaphoreWaitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
eSemaphoreSignalInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,
ePhysicalDeviceBufferDeviceAddressFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES,
eBufferDeviceAddressInfo = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
eBufferOpaqueCaptureAddressCreateInfo = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO,
eMemoryOpaqueCaptureAddressAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO,
eDeviceMemoryOpaqueCaptureAddressInfo = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO,
eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR,
eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR,
eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR,
eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR,
eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR,
eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR,
eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR,
eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR,
eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR,
#ifdef VK_USE_PLATFORM_XLIB_KHR
eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR,
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
#ifdef VK_USE_PLATFORM_XCB_KHR
eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR,
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR,
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR,
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD,
eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT,
eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT,
eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT,
eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV,
eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV,
eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV,
ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT,
ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT,
ePipelineRasterizationStateStreamCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT,
eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX,
eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX,
eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD,
#ifdef VK_USE_PLATFORM_GGP
eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP,
#endif /*VK_USE_PLATFORM_GGP*/
ePhysicalDeviceCornerSampledImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV,
eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV,
eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV,
#ifdef VK_USE_PLATFORM_WIN32_KHR
eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT,
#ifdef VK_USE_PLATFORM_VI_NN
eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN,
#endif /*VK_USE_PLATFORM_VI_NN*/
ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT,
eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT,
ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT,
#ifdef VK_USE_PLATFORM_WIN32_KHR
eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
eMemoryWin32HandlePropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
eMemoryGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
eImportMemoryFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
eMemoryFdPropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR,
eMemoryGetFdInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
#ifdef VK_USE_PLATFORM_WIN32_KHR
eWin32KeyedMutexAcquireReleaseInfoKHR = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
eImportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
eExportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
eD3D12FenceSubmitInfoKHR = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
eSemaphoreGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR,
eCommandBufferInheritanceConditionalRenderingInfoEXT = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT,
ePhysicalDeviceConditionalRenderingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT,
eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT,
ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV,
eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT,
eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT,
eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT,
eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT,
eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT,
ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE,
ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX,
ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV,
ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT,
ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT,
ePhysicalDeviceConservativeRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT,
ePipelineRasterizationConservativeStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT,
ePhysicalDeviceDepthClipEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT,
ePipelineRasterizationDepthClipStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT,
eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT,
eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR,
#ifdef VK_USE_PLATFORM_WIN32_KHR
eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR,
ePhysicalDevicePerformanceQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR,
ePhysicalDevicePerformanceQueryPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR,
eQueryPoolPerformanceCreateInfoKHR = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR,
ePerformanceQuerySubmitInfoKHR = VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR,
eAcquireProfilingLockInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR,
ePerformanceCounterKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR,
ePerformanceCounterDescriptionKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR,
ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR,
eDisplayProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR,
eDisplayPlaneProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR,
eDisplayModeProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR,
eDisplayPlaneInfo2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR,
eDisplayPlaneCapabilities2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR,
#ifdef VK_USE_PLATFORM_IOS_MVK
eIosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK,
#endif /*VK_USE_PLATFORM_IOS_MVK*/
#ifdef VK_USE_PLATFORM_MACOS_MVK
eMacosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK,
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
eDebugUtilsObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT,
eDebugUtilsObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT,
eDebugUtilsLabelEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT,
eDebugUtilsMessengerCallbackDataEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT,
eDebugUtilsMessengerCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
#ifdef VK_USE_PLATFORM_ANDROID_KHR
eAndroidHardwareBufferUsageANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID,
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
eAndroidHardwareBufferPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID,
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
eAndroidHardwareBufferFormatPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID,
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT,
ePhysicalDeviceInlineUniformBlockPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT,
eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT,
eDescriptorPoolInlineUniformBlockCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT,
eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT,
eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT,
ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT,
ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT,
eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT,
ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT,
ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT,
ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT,
ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV,
eWriteDescriptorSetAccelerationStructureKHR = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR,
eAccelerationStructureBuildGeometryInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR,
eAccelerationStructureDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR,
eAccelerationStructureGeometryAabbsDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR,
eAccelerationStructureGeometryInstancesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR,
eAccelerationStructureGeometryTrianglesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR,
eAccelerationStructureGeometryKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR,
eAccelerationStructureVersionInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR,
eCopyAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR,
eCopyAccelerationStructureToMemoryInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR,
eCopyMemoryToAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR,
ePhysicalDeviceAccelerationStructureFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR,
ePhysicalDeviceAccelerationStructurePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR,
eAccelerationStructureCreateInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR,
eAccelerationStructureBuildSizesInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR,
ePhysicalDeviceRayTracingPipelineFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR,
ePhysicalDeviceRayTracingPipelinePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR,
eRayTracingPipelineCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR,
eRayTracingShaderGroupCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR,
eRayTracingPipelineInterfaceCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR,
ePhysicalDeviceRayQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR,
ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV,
ePhysicalDeviceShaderSmBuiltinsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV,
ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV,
eDrmFormatModifierPropertiesListEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
ePhysicalDeviceImageDrmFormatModifierInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
eImageDrmFormatModifierListCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT,
eImageDrmFormatModifierExplicitCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT,
eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT,
#ifdef VK_ENABLE_BETA_EXTENSIONS
ePhysicalDevicePortabilitySubsetFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#ifdef VK_ENABLE_BETA_EXTENSIONS
ePhysicalDevicePortabilitySubsetPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR,
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
ePipelineViewportShadingRateImageStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV,
ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV,
ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV,
ePipelineViewportCoarseSampleOrderStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV,
eRayTracingPipelineCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV,
eAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV,
eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV,
eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV,
eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV,
eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV,
eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV,
eAccelerationStructureMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV,
ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV,
eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV,
eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV,
ePhysicalDeviceRepresentativeFragmentTestFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV,
ePipelineRepresentativeFragmentTestStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV,
ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT,
eFilterCubicImageViewImageFormatPropertiesEXT = VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT,
eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT,
eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT,
ePhysicalDeviceExternalMemoryHostPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT,
ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR,
ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD,
eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT,
ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD,
eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD,
ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT,
ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT,
ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT,
#ifdef VK_USE_PLATFORM_GGP
ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP,
#endif /*VK_USE_PLATFORM_GGP*/
ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT,
ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV,
ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV,
ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV,
ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV,
ePhysicalDeviceShaderImageFootprintFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV,
ePipelineViewportExclusiveScissorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV,
ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV,
eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV,
eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV,
ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL,
eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL,
eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL,
ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL,
ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL,
ePerformanceOverrideInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL,
ePerformanceConfigurationAcquireInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL,
ePhysicalDevicePciBusInfoPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT,
eDisplayNativeHdrSurfaceCapabilitiesAMD = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD,
eSwapchainDisplayNativeHdrCreateInfoAMD = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD,
#ifdef VK_USE_PLATFORM_FUCHSIA
eImagepipeSurfaceCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA,
#endif /*VK_USE_PLATFORM_FUCHSIA*/
ePhysicalDeviceShaderTerminateInvocationFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR,
#ifdef VK_USE_PLATFORM_METAL_EXT
eMetalSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT,
#endif /*VK_USE_PLATFORM_METAL_EXT*/
ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT,
ePhysicalDeviceFragmentDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT,
eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT,
ePhysicalDeviceSubgroupSizeControlPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT,
ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT,
ePhysicalDeviceSubgroupSizeControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT,
eFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR,
ePipelineFragmentShadingRateStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR,
ePhysicalDeviceFragmentShadingRatePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR,
ePhysicalDeviceFragmentShadingRateFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR,
ePhysicalDeviceFragmentShadingRateKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR,
ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD,
ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD,
ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT,
ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT,
ePhysicalDeviceMemoryPriorityFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT,
eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT,
eSurfaceProtectedCapabilitiesKHR = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR,
ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV,
ePhysicalDeviceBufferDeviceAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT,
eBufferDeviceAddressCreateInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT,
ePhysicalDeviceToolPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT,
eValidationFeaturesEXT = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT,
ePhysicalDeviceCooperativeMatrixFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV,
eCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV,
ePhysicalDeviceCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV,
ePhysicalDeviceCoverageReductionModeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV,
ePipelineCoverageReductionStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV,
eFramebufferMixedSamplesCombinationNV = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV,
ePhysicalDeviceFragmentShaderInterlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT,
ePhysicalDeviceYcbcrImageArraysFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT,
#ifdef VK_USE_PLATFORM_WIN32_KHR
eSurfaceFullScreenExclusiveInfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
eSurfaceCapabilitiesFullScreenExclusiveEXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
eSurfaceFullScreenExclusiveWin32InfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT,
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
eHeadlessSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT,
ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT,
ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT,
ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT,
ePhysicalDeviceShaderAtomicFloatFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT,
ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT,
ePhysicalDeviceExtendedDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT,
ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR,
ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR,
ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR,
ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR,
ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR,
ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR,
ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT,
ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV,
eGraphicsShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV,
eGraphicsPipelineShaderGroupsCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV,
eIndirectCommandsLayoutTokenNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV,
eIndirectCommandsLayoutCreateInfoNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV,
eGeneratedCommandsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV,
eGeneratedCommandsMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV,
ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV,
ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT,
ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT,
eCommandBufferInheritanceRenderPassTransformInfoQCOM = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM,
eRenderPassTransformBeginInfoQCOM = VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM,
ePhysicalDeviceDeviceMemoryReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT,
eDeviceDeviceMemoryReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT,
eDeviceMemoryReportCallbackDataEXT = VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT,
ePhysicalDeviceRobustness2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT,
ePhysicalDeviceRobustness2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT,
eSamplerCustomBorderColorCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT,
ePhysicalDeviceCustomBorderColorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT,
ePhysicalDeviceCustomBorderColorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT,
ePipelineLibraryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR,
ePhysicalDevicePrivateDataFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT,
  7619. eDevicePrivateDataCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT,
  7620. ePrivateDataSlotCreateInfoEXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT,
  7621. ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT,
  7622. ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV,
  7623. eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV,
  7624. ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR,
  7625. ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV,
  7626. ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV,
  7627. ePipelineFragmentShadingRateEnumStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV,
  7628. ePhysicalDeviceFragmentDensityMap2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT,
  7629. ePhysicalDeviceFragmentDensityMap2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT,
  7630. eCopyCommandTransformInfoQCOM = VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM,
  7631. ePhysicalDeviceImageRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT,
  7632. ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR,
  7633. eCopyBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR,
  7634. eCopyImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR,
  7635. eCopyBufferToImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR,
  7636. eCopyImageToBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR,
  7637. eBlitImageInfo2KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR,
  7638. eResolveImageInfo2KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR,
  7639. eBufferCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR,
  7640. eImageCopy2KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR,
  7641. eImageBlit2KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR,
  7642. eBufferImageCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR,
  7643. eImageResolve2KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR,
  7644. ePhysicalDevice4444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT,
  7645. #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
  7646. eDirectfbSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT,
  7647. #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
  7648. ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE,
  7649. eMutableDescriptorTypeCreateInfoVALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE,
  7650. eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR,
  7651. eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR,
  7652. eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR,
  7653. eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR,
  7654. eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR,
  7655. eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR,
  7656. eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR,
  7657. eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR,
  7658. eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR,
  7659. eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT,
  7660. eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR,
  7661. eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR,
  7662. eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR,
  7663. eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT,
  7664. eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT,
  7665. eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR,
  7666. eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT,
  7667. eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT,
  7668. eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR,
  7669. eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR,
  7670. eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR,
  7671. eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR,
  7672. eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR,
  7673. eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR,
  7674. eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR,
  7675. eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR,
  7676. eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR,
  7677. eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR,
  7678. eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR,
  7679. eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR,
  7680. eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
  7681. eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR,
  7682. eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR,
  7683. eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR,
  7684. eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR,
  7685. eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR,
  7686. eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR,
  7687. eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
  7688. eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR,
  7689. eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR,
  7690. eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR,
  7691. eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR,
  7692. eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT,
  7693. eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR,
  7694. eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR,
  7695. eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR,
  7696. eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR,
  7697. eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR,
  7698. eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
  7699. ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR,
  7700. ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR,
  7701. ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT,
  7702. ePhysicalDeviceBufferDeviceAddressFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR,
  7703. ePhysicalDeviceDepthStencilResolvePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR,
  7704. ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT,
  7705. ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT,
  7706. ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR,
  7707. ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR,
  7708. ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR,
  7709. ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR,
  7710. ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR,
  7711. ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR,
  7712. ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR,
  7713. ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR,
  7714. ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR,
  7715. ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT,
  7716. ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR,
  7717. ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR,
  7718. ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR,
  7719. ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR,
  7720. ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR,
  7721. ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR,
  7722. ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR,
  7723. ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR,
  7724. ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
  7725. ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
  7726. ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR,
  7727. ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT,
  7728. ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR,
  7729. ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR,
  7730. ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES,
  7731. ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR,
  7732. ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR,
  7733. ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR,
  7734. ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR,
  7735. ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR,
  7736. ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR,
  7737. ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR,
  7738. ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
  7739. ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR,
  7740. ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR,
  7741. ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR,
  7742. eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL,
  7743. eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR,
  7744. eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR,
  7745. eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR,
  7746. eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR,
  7747. eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR,
  7748. eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
  7749. eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR,
  7750. eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR,
  7751. eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR,
  7752. eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR,
  7753. eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR,
  7754. eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR,
  7755. eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR,
  7756. eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR,
  7757. eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR,
  7758. eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR,
  7759. eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR,
  7760. eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR,
  7761. eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR,
  7762. eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR};
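  // Illustrative usage sketch (not part of the generated header): every vulkan.hpp
  // structure carries its StructureType in its sType member, and the to_string
  // overload defined just below renders a StructureType value as a readable name,
  // which is convenient when logging pNext chains. Assuming the default `vk`
  // namespace alias, a caller might write:
  //
  //   vk::ApplicationInfo appInfo;                               // sType is preset to eApplicationInfo
  //   std::cout << vk::to_string( appInfo.sType ) << std::endl;  // prints "ApplicationInfo"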
  VULKAN_HPP_INLINE std::string to_string( StructureType value )
  {
    switch ( value )
    {
      case StructureType::eApplicationInfo : return "ApplicationInfo";
      case StructureType::eInstanceCreateInfo : return "InstanceCreateInfo";
      case StructureType::eDeviceQueueCreateInfo : return "DeviceQueueCreateInfo";
      case StructureType::eDeviceCreateInfo : return "DeviceCreateInfo";
      case StructureType::eSubmitInfo : return "SubmitInfo";
      case StructureType::eMemoryAllocateInfo : return "MemoryAllocateInfo";
      case StructureType::eMappedMemoryRange : return "MappedMemoryRange";
      case StructureType::eBindSparseInfo : return "BindSparseInfo";
      case StructureType::eFenceCreateInfo : return "FenceCreateInfo";
      case StructureType::eSemaphoreCreateInfo : return "SemaphoreCreateInfo";
      case StructureType::eEventCreateInfo : return "EventCreateInfo";
      case StructureType::eQueryPoolCreateInfo : return "QueryPoolCreateInfo";
      case StructureType::eBufferCreateInfo : return "BufferCreateInfo";
      case StructureType::eBufferViewCreateInfo : return "BufferViewCreateInfo";
      case StructureType::eImageCreateInfo : return "ImageCreateInfo";
      case StructureType::eImageViewCreateInfo : return "ImageViewCreateInfo";
      case StructureType::eShaderModuleCreateInfo : return "ShaderModuleCreateInfo";
      case StructureType::ePipelineCacheCreateInfo : return "PipelineCacheCreateInfo";
      case StructureType::ePipelineShaderStageCreateInfo : return "PipelineShaderStageCreateInfo";
      case StructureType::ePipelineVertexInputStateCreateInfo : return "PipelineVertexInputStateCreateInfo";
      case StructureType::ePipelineInputAssemblyStateCreateInfo : return "PipelineInputAssemblyStateCreateInfo";
      case StructureType::ePipelineTessellationStateCreateInfo : return "PipelineTessellationStateCreateInfo";
      case StructureType::ePipelineViewportStateCreateInfo : return "PipelineViewportStateCreateInfo";
      case StructureType::ePipelineRasterizationStateCreateInfo : return "PipelineRasterizationStateCreateInfo";
      case StructureType::ePipelineMultisampleStateCreateInfo : return "PipelineMultisampleStateCreateInfo";
      case StructureType::ePipelineDepthStencilStateCreateInfo : return "PipelineDepthStencilStateCreateInfo";
      case StructureType::ePipelineColorBlendStateCreateInfo : return "PipelineColorBlendStateCreateInfo";
      case StructureType::ePipelineDynamicStateCreateInfo : return "PipelineDynamicStateCreateInfo";
      case StructureType::eGraphicsPipelineCreateInfo : return "GraphicsPipelineCreateInfo";
      case StructureType::eComputePipelineCreateInfo : return "ComputePipelineCreateInfo";
      case StructureType::ePipelineLayoutCreateInfo : return "PipelineLayoutCreateInfo";
      case StructureType::eSamplerCreateInfo : return "SamplerCreateInfo";
      case StructureType::eDescriptorSetLayoutCreateInfo : return "DescriptorSetLayoutCreateInfo";
      case StructureType::eDescriptorPoolCreateInfo : return "DescriptorPoolCreateInfo";
      case StructureType::eDescriptorSetAllocateInfo : return "DescriptorSetAllocateInfo";
      case StructureType::eWriteDescriptorSet : return "WriteDescriptorSet";
      case StructureType::eCopyDescriptorSet : return "CopyDescriptorSet";
      case StructureType::eFramebufferCreateInfo : return "FramebufferCreateInfo";
      case StructureType::eRenderPassCreateInfo : return "RenderPassCreateInfo";
      case StructureType::eCommandPoolCreateInfo : return "CommandPoolCreateInfo";
      case StructureType::eCommandBufferAllocateInfo : return "CommandBufferAllocateInfo";
      case StructureType::eCommandBufferInheritanceInfo : return "CommandBufferInheritanceInfo";
      case StructureType::eCommandBufferBeginInfo : return "CommandBufferBeginInfo";
      case StructureType::eRenderPassBeginInfo : return "RenderPassBeginInfo";
      case StructureType::eBufferMemoryBarrier : return "BufferMemoryBarrier";
      case StructureType::eImageMemoryBarrier : return "ImageMemoryBarrier";
      case StructureType::eMemoryBarrier : return "MemoryBarrier";
      case StructureType::eLoaderInstanceCreateInfo : return "LoaderInstanceCreateInfo";
      case StructureType::eLoaderDeviceCreateInfo : return "LoaderDeviceCreateInfo";
      case StructureType::ePhysicalDeviceSubgroupProperties : return "PhysicalDeviceSubgroupProperties";
      case StructureType::eBindBufferMemoryInfo : return "BindBufferMemoryInfo";
      case StructureType::eBindImageMemoryInfo : return "BindImageMemoryInfo";
      case StructureType::ePhysicalDevice16BitStorageFeatures : return "PhysicalDevice16BitStorageFeatures";
      case StructureType::eMemoryDedicatedRequirements : return "MemoryDedicatedRequirements";
      case StructureType::eMemoryDedicatedAllocateInfo : return "MemoryDedicatedAllocateInfo";
      case StructureType::eMemoryAllocateFlagsInfo : return "MemoryAllocateFlagsInfo";
      case StructureType::eDeviceGroupRenderPassBeginInfo : return "DeviceGroupRenderPassBeginInfo";
      case StructureType::eDeviceGroupCommandBufferBeginInfo : return "DeviceGroupCommandBufferBeginInfo";
      case StructureType::eDeviceGroupSubmitInfo : return "DeviceGroupSubmitInfo";
      case StructureType::eDeviceGroupBindSparseInfo : return "DeviceGroupBindSparseInfo";
      case StructureType::eBindBufferMemoryDeviceGroupInfo : return "BindBufferMemoryDeviceGroupInfo";
      case StructureType::eBindImageMemoryDeviceGroupInfo : return "BindImageMemoryDeviceGroupInfo";
      case StructureType::ePhysicalDeviceGroupProperties : return "PhysicalDeviceGroupProperties";
      case StructureType::eDeviceGroupDeviceCreateInfo : return "DeviceGroupDeviceCreateInfo";
      case StructureType::eBufferMemoryRequirementsInfo2 : return "BufferMemoryRequirementsInfo2";
      case StructureType::eImageMemoryRequirementsInfo2 : return "ImageMemoryRequirementsInfo2";
      case StructureType::eImageSparseMemoryRequirementsInfo2 : return "ImageSparseMemoryRequirementsInfo2";
      case StructureType::eMemoryRequirements2 : return "MemoryRequirements2";
      case StructureType::eSparseImageMemoryRequirements2 : return "SparseImageMemoryRequirements2";
      case StructureType::ePhysicalDeviceFeatures2 : return "PhysicalDeviceFeatures2";
      case StructureType::ePhysicalDeviceProperties2 : return "PhysicalDeviceProperties2";
      case StructureType::eFormatProperties2 : return "FormatProperties2";
      case StructureType::eImageFormatProperties2 : return "ImageFormatProperties2";
      case StructureType::ePhysicalDeviceImageFormatInfo2 : return "PhysicalDeviceImageFormatInfo2";
      case StructureType::eQueueFamilyProperties2 : return "QueueFamilyProperties2";
      case StructureType::ePhysicalDeviceMemoryProperties2 : return "PhysicalDeviceMemoryProperties2";
      case StructureType::eSparseImageFormatProperties2 : return "SparseImageFormatProperties2";
      case StructureType::ePhysicalDeviceSparseImageFormatInfo2 : return "PhysicalDeviceSparseImageFormatInfo2";
      case StructureType::ePhysicalDevicePointClippingProperties : return "PhysicalDevicePointClippingProperties";
      case StructureType::eRenderPassInputAttachmentAspectCreateInfo : return "RenderPassInputAttachmentAspectCreateInfo";
      case StructureType::eImageViewUsageCreateInfo : return "ImageViewUsageCreateInfo";
      case StructureType::ePipelineTessellationDomainOriginStateCreateInfo : return "PipelineTessellationDomainOriginStateCreateInfo";
      case StructureType::eRenderPassMultiviewCreateInfo : return "RenderPassMultiviewCreateInfo";
      case StructureType::ePhysicalDeviceMultiviewFeatures : return "PhysicalDeviceMultiviewFeatures";
      case StructureType::ePhysicalDeviceMultiviewProperties : return "PhysicalDeviceMultiviewProperties";
      case StructureType::ePhysicalDeviceVariablePointersFeatures : return "PhysicalDeviceVariablePointersFeatures";
      case StructureType::eProtectedSubmitInfo : return "ProtectedSubmitInfo";
      case StructureType::ePhysicalDeviceProtectedMemoryFeatures : return "PhysicalDeviceProtectedMemoryFeatures";
      case StructureType::ePhysicalDeviceProtectedMemoryProperties : return "PhysicalDeviceProtectedMemoryProperties";
      case StructureType::eDeviceQueueInfo2 : return "DeviceQueueInfo2";
      case StructureType::eSamplerYcbcrConversionCreateInfo : return "SamplerYcbcrConversionCreateInfo";
      case StructureType::eSamplerYcbcrConversionInfo : return "SamplerYcbcrConversionInfo";
      case StructureType::eBindImagePlaneMemoryInfo : return "BindImagePlaneMemoryInfo";
      case StructureType::eImagePlaneMemoryRequirementsInfo : return "ImagePlaneMemoryRequirementsInfo";
      case StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures : return "PhysicalDeviceSamplerYcbcrConversionFeatures";
      case StructureType::eSamplerYcbcrConversionImageFormatProperties : return "SamplerYcbcrConversionImageFormatProperties";
      case StructureType::eDescriptorUpdateTemplateCreateInfo : return "DescriptorUpdateTemplateCreateInfo";
      case StructureType::ePhysicalDeviceExternalImageFormatInfo : return "PhysicalDeviceExternalImageFormatInfo";
      case StructureType::eExternalImageFormatProperties : return "ExternalImageFormatProperties";
      case StructureType::ePhysicalDeviceExternalBufferInfo : return "PhysicalDeviceExternalBufferInfo";
      case StructureType::eExternalBufferProperties : return "ExternalBufferProperties";
      case StructureType::ePhysicalDeviceIdProperties : return "PhysicalDeviceIdProperties";
      case StructureType::eExternalMemoryBufferCreateInfo : return "ExternalMemoryBufferCreateInfo";
      case StructureType::eExternalMemoryImageCreateInfo : return "ExternalMemoryImageCreateInfo";
      case StructureType::eExportMemoryAllocateInfo : return "ExportMemoryAllocateInfo";
      case StructureType::ePhysicalDeviceExternalFenceInfo : return "PhysicalDeviceExternalFenceInfo";
      case StructureType::eExternalFenceProperties : return "ExternalFenceProperties";
      case StructureType::eExportFenceCreateInfo : return "ExportFenceCreateInfo";
      case StructureType::eExportSemaphoreCreateInfo : return "ExportSemaphoreCreateInfo";
      case StructureType::ePhysicalDeviceExternalSemaphoreInfo : return "PhysicalDeviceExternalSemaphoreInfo";
      case StructureType::eExternalSemaphoreProperties : return "ExternalSemaphoreProperties";
      case StructureType::ePhysicalDeviceMaintenance3Properties : return "PhysicalDeviceMaintenance3Properties";
      case StructureType::eDescriptorSetLayoutSupport : return "DescriptorSetLayoutSupport";
      case StructureType::ePhysicalDeviceShaderDrawParametersFeatures : return "PhysicalDeviceShaderDrawParametersFeatures";
      case StructureType::ePhysicalDeviceVulkan11Features : return "PhysicalDeviceVulkan11Features";
      case StructureType::ePhysicalDeviceVulkan11Properties : return "PhysicalDeviceVulkan11Properties";
      case StructureType::ePhysicalDeviceVulkan12Features : return "PhysicalDeviceVulkan12Features";
      case StructureType::ePhysicalDeviceVulkan12Properties : return "PhysicalDeviceVulkan12Properties";
      case StructureType::eImageFormatListCreateInfo : return "ImageFormatListCreateInfo";
      case StructureType::eAttachmentDescription2 : return "AttachmentDescription2";
      case StructureType::eAttachmentReference2 : return "AttachmentReference2";
      case StructureType::eSubpassDescription2 : return "SubpassDescription2";
      case StructureType::eSubpassDependency2 : return "SubpassDependency2";
      case StructureType::eRenderPassCreateInfo2 : return "RenderPassCreateInfo2";
      case StructureType::eSubpassBeginInfo : return "SubpassBeginInfo";
      case StructureType::eSubpassEndInfo : return "SubpassEndInfo";
      case StructureType::ePhysicalDevice8BitStorageFeatures : return "PhysicalDevice8BitStorageFeatures";
      case StructureType::ePhysicalDeviceDriverProperties : return "PhysicalDeviceDriverProperties";
      case StructureType::ePhysicalDeviceShaderAtomicInt64Features : return "PhysicalDeviceShaderAtomicInt64Features";
      case StructureType::ePhysicalDeviceShaderFloat16Int8Features : return "PhysicalDeviceShaderFloat16Int8Features";
      case StructureType::ePhysicalDeviceFloatControlsProperties : return "PhysicalDeviceFloatControlsProperties";
      case StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo : return "DescriptorSetLayoutBindingFlagsCreateInfo";
      case StructureType::ePhysicalDeviceDescriptorIndexingFeatures : return "PhysicalDeviceDescriptorIndexingFeatures";
      case StructureType::ePhysicalDeviceDescriptorIndexingProperties : return "PhysicalDeviceDescriptorIndexingProperties";
      case StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo : return "DescriptorSetVariableDescriptorCountAllocateInfo";
      case StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport : return "DescriptorSetVariableDescriptorCountLayoutSupport";
      case StructureType::ePhysicalDeviceDepthStencilResolveProperties : return "PhysicalDeviceDepthStencilResolveProperties";
      case StructureType::eSubpassDescriptionDepthStencilResolve : return "SubpassDescriptionDepthStencilResolve";
      case StructureType::ePhysicalDeviceScalarBlockLayoutFeatures : return "PhysicalDeviceScalarBlockLayoutFeatures";
      case StructureType::eImageStencilUsageCreateInfo : return "ImageStencilUsageCreateInfo";
      case StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties : return "PhysicalDeviceSamplerFilterMinmaxProperties";
      case StructureType::eSamplerReductionModeCreateInfo : return "SamplerReductionModeCreateInfo";
      case StructureType::ePhysicalDeviceVulkanMemoryModelFeatures : return "PhysicalDeviceVulkanMemoryModelFeatures";
      case StructureType::ePhysicalDeviceImagelessFramebufferFeatures : return "PhysicalDeviceImagelessFramebufferFeatures";
      case StructureType::eFramebufferAttachmentsCreateInfo : return "FramebufferAttachmentsCreateInfo";
      case StructureType::eFramebufferAttachmentImageInfo : return "FramebufferAttachmentImageInfo";
      case StructureType::eRenderPassAttachmentBeginInfo : return "RenderPassAttachmentBeginInfo";
      case StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures : return "PhysicalDeviceUniformBufferStandardLayoutFeatures";
      case StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures : return "PhysicalDeviceShaderSubgroupExtendedTypesFeatures";
      case StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures : return "PhysicalDeviceSeparateDepthStencilLayoutsFeatures";
      case StructureType::eAttachmentReferenceStencilLayout : return "AttachmentReferenceStencilLayout";
      case StructureType::eAttachmentDescriptionStencilLayout : return "AttachmentDescriptionStencilLayout";
      case StructureType::ePhysicalDeviceHostQueryResetFeatures : return "PhysicalDeviceHostQueryResetFeatures";
      case StructureType::ePhysicalDeviceTimelineSemaphoreFeatures : return "PhysicalDeviceTimelineSemaphoreFeatures";
      case StructureType::ePhysicalDeviceTimelineSemaphoreProperties : return "PhysicalDeviceTimelineSemaphoreProperties";
      case StructureType::eSemaphoreTypeCreateInfo : return "SemaphoreTypeCreateInfo";
      case StructureType::eTimelineSemaphoreSubmitInfo : return "TimelineSemaphoreSubmitInfo";
      case StructureType::eSemaphoreWaitInfo : return "SemaphoreWaitInfo";
      case StructureType::eSemaphoreSignalInfo : return "SemaphoreSignalInfo";
      case StructureType::ePhysicalDeviceBufferDeviceAddressFeatures : return "PhysicalDeviceBufferDeviceAddressFeatures";
      case StructureType::eBufferDeviceAddressInfo : return "BufferDeviceAddressInfo";
      case StructureType::eBufferOpaqueCaptureAddressCreateInfo : return "BufferOpaqueCaptureAddressCreateInfo";
      case StructureType::eMemoryOpaqueCaptureAddressAllocateInfo : return "MemoryOpaqueCaptureAddressAllocateInfo";
      case StructureType::eDeviceMemoryOpaqueCaptureAddressInfo : return "DeviceMemoryOpaqueCaptureAddressInfo";
      case StructureType::eSwapchainCreateInfoKHR : return "SwapchainCreateInfoKHR";
      case StructureType::ePresentInfoKHR : return "PresentInfoKHR";
      case StructureType::eDeviceGroupPresentCapabilitiesKHR : return "DeviceGroupPresentCapabilitiesKHR";
      case StructureType::eImageSwapchainCreateInfoKHR : return "ImageSwapchainCreateInfoKHR";
      case StructureType::eBindImageMemorySwapchainInfoKHR : return "BindImageMemorySwapchainInfoKHR";
      case StructureType::eAcquireNextImageInfoKHR : return "AcquireNextImageInfoKHR";
      case StructureType::eDeviceGroupPresentInfoKHR : return "DeviceGroupPresentInfoKHR";
      case StructureType::eDeviceGroupSwapchainCreateInfoKHR : return "DeviceGroupSwapchainCreateInfoKHR";
      case StructureType::eDisplayModeCreateInfoKHR : return "DisplayModeCreateInfoKHR";
      case StructureType::eDisplaySurfaceCreateInfoKHR : return "DisplaySurfaceCreateInfoKHR";
      case StructureType::eDisplayPresentInfoKHR : return "DisplayPresentInfoKHR";
#ifdef VK_USE_PLATFORM_XLIB_KHR
      case StructureType::eXlibSurfaceCreateInfoKHR : return "XlibSurfaceCreateInfoKHR";
#endif /*VK_USE_PLATFORM_XLIB_KHR*/
#ifdef VK_USE_PLATFORM_XCB_KHR
      case StructureType::eXcbSurfaceCreateInfoKHR : return "XcbSurfaceCreateInfoKHR";
#endif /*VK_USE_PLATFORM_XCB_KHR*/
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
      case StructureType::eWaylandSurfaceCreateInfoKHR : return "WaylandSurfaceCreateInfoKHR";
#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
      case StructureType::eAndroidSurfaceCreateInfoKHR : return "AndroidSurfaceCreateInfoKHR";
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
      case StructureType::eWin32SurfaceCreateInfoKHR : return "Win32SurfaceCreateInfoKHR";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      case StructureType::eDebugReportCallbackCreateInfoEXT : return "DebugReportCallbackCreateInfoEXT";
      case StructureType::ePipelineRasterizationStateRasterizationOrderAMD : return "PipelineRasterizationStateRasterizationOrderAMD";
      case StructureType::eDebugMarkerObjectNameInfoEXT : return "DebugMarkerObjectNameInfoEXT";
      case StructureType::eDebugMarkerObjectTagInfoEXT : return "DebugMarkerObjectTagInfoEXT";
      case StructureType::eDebugMarkerMarkerInfoEXT : return "DebugMarkerMarkerInfoEXT";
      case StructureType::eDedicatedAllocationImageCreateInfoNV : return "DedicatedAllocationImageCreateInfoNV";
      case StructureType::eDedicatedAllocationBufferCreateInfoNV : return "DedicatedAllocationBufferCreateInfoNV";
      case StructureType::eDedicatedAllocationMemoryAllocateInfoNV : return "DedicatedAllocationMemoryAllocateInfoNV";
      case StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT : return "PhysicalDeviceTransformFeedbackFeaturesEXT";
      case StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT : return "PhysicalDeviceTransformFeedbackPropertiesEXT";
      case StructureType::ePipelineRasterizationStateStreamCreateInfoEXT : return "PipelineRasterizationStateStreamCreateInfoEXT";
      case StructureType::eImageViewHandleInfoNVX : return "ImageViewHandleInfoNVX";
      case StructureType::eImageViewAddressPropertiesNVX : return "ImageViewAddressPropertiesNVX";
      case StructureType::eTextureLodGatherFormatPropertiesAMD : return "TextureLodGatherFormatPropertiesAMD";
#ifdef VK_USE_PLATFORM_GGP
      case StructureType::eStreamDescriptorSurfaceCreateInfoGGP : return "StreamDescriptorSurfaceCreateInfoGGP";
#endif /*VK_USE_PLATFORM_GGP*/
      case StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV : return "PhysicalDeviceCornerSampledImageFeaturesNV";
      case StructureType::eExternalMemoryImageCreateInfoNV : return "ExternalMemoryImageCreateInfoNV";
      case StructureType::eExportMemoryAllocateInfoNV : return "ExportMemoryAllocateInfoNV";
#ifdef VK_USE_PLATFORM_WIN32_KHR
      case StructureType::eImportMemoryWin32HandleInfoNV : return "ImportMemoryWin32HandleInfoNV";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
      case StructureType::eExportMemoryWin32HandleInfoNV : return "ExportMemoryWin32HandleInfoNV";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
      case StructureType::eWin32KeyedMutexAcquireReleaseInfoNV : return "Win32KeyedMutexAcquireReleaseInfoNV";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      case StructureType::eValidationFlagsEXT : return "ValidationFlagsEXT";
#ifdef VK_USE_PLATFORM_VI_NN
      case StructureType::eViSurfaceCreateInfoNN : return "ViSurfaceCreateInfoNN";
#endif /*VK_USE_PLATFORM_VI_NN*/
      case StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT : return "PhysicalDeviceTextureCompressionAstcHdrFeaturesEXT";
      case StructureType::eImageViewAstcDecodeModeEXT : return "ImageViewAstcDecodeModeEXT";
      case StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT : return "PhysicalDeviceAstcDecodeFeaturesEXT";
#ifdef VK_USE_PLATFORM_WIN32_KHR
      case StructureType::eImportMemoryWin32HandleInfoKHR : return "ImportMemoryWin32HandleInfoKHR";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
      case StructureType::eExportMemoryWin32HandleInfoKHR : return "ExportMemoryWin32HandleInfoKHR";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
      case StructureType::eMemoryWin32HandlePropertiesKHR : return "MemoryWin32HandlePropertiesKHR";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
      case StructureType::eMemoryGetWin32HandleInfoKHR : return "MemoryGetWin32HandleInfoKHR";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      case StructureType::eImportMemoryFdInfoKHR : return "ImportMemoryFdInfoKHR";
      case StructureType::eMemoryFdPropertiesKHR : return "MemoryFdPropertiesKHR";
      case StructureType::eMemoryGetFdInfoKHR : return "MemoryGetFdInfoKHR";
#ifdef VK_USE_PLATFORM_WIN32_KHR
      case StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR : return "Win32KeyedMutexAcquireReleaseInfoKHR";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
      case StructureType::eImportSemaphoreWin32HandleInfoKHR : return "ImportSemaphoreWin32HandleInfoKHR";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
      case StructureType::eExportSemaphoreWin32HandleInfoKHR : return "ExportSemaphoreWin32HandleInfoKHR";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
      case StructureType::eD3D12FenceSubmitInfoKHR : return "D3D12FenceSubmitInfoKHR";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
      case StructureType::eSemaphoreGetWin32HandleInfoKHR : return "SemaphoreGetWin32HandleInfoKHR";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      case StructureType::eImportSemaphoreFdInfoKHR : return "ImportSemaphoreFdInfoKHR";
      case StructureType::eSemaphoreGetFdInfoKHR : return "SemaphoreGetFdInfoKHR";
      case StructureType::ePhysicalDevicePushDescriptorPropertiesKHR : return "PhysicalDevicePushDescriptorPropertiesKHR";
      case StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT : return "CommandBufferInheritanceConditionalRenderingInfoEXT";
      case StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT : return "PhysicalDeviceConditionalRenderingFeaturesEXT";
      case StructureType::eConditionalRenderingBeginInfoEXT : return "ConditionalRenderingBeginInfoEXT";
      case StructureType::ePresentRegionsKHR : return "PresentRegionsKHR";
      case StructureType::ePipelineViewportWScalingStateCreateInfoNV : return "PipelineViewportWScalingStateCreateInfoNV";
      case StructureType::eSurfaceCapabilities2EXT : return "SurfaceCapabilities2EXT";
      case StructureType::eDisplayPowerInfoEXT : return "DisplayPowerInfoEXT";
      case StructureType::eDeviceEventInfoEXT : return "DeviceEventInfoEXT";
      case StructureType::eDisplayEventInfoEXT : return "DisplayEventInfoEXT";
      case StructureType::eSwapchainCounterCreateInfoEXT : return "SwapchainCounterCreateInfoEXT";
      case StructureType::ePresentTimesInfoGOOGLE : return "PresentTimesInfoGOOGLE";
      case StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX : return "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX";
      case StructureType::ePipelineViewportSwizzleStateCreateInfoNV : return "PipelineViewportSwizzleStateCreateInfoNV";
      case StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT : return "PhysicalDeviceDiscardRectanglePropertiesEXT";
      case StructureType::ePipelineDiscardRectangleStateCreateInfoEXT : return "PipelineDiscardRectangleStateCreateInfoEXT";
      case StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT : return "PhysicalDeviceConservativeRasterizationPropertiesEXT";
      case StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT : return "PipelineRasterizationConservativeStateCreateInfoEXT";
      case StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT : return "PhysicalDeviceDepthClipEnableFeaturesEXT";
      case StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT : return "PipelineRasterizationDepthClipStateCreateInfoEXT";
      case StructureType::eHdrMetadataEXT : return "HdrMetadataEXT";
      case StructureType::eSharedPresentSurfaceCapabilitiesKHR : return "SharedPresentSurfaceCapabilitiesKHR";
#ifdef VK_USE_PLATFORM_WIN32_KHR
      case StructureType::eImportFenceWin32HandleInfoKHR : return "ImportFenceWin32HandleInfoKHR";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
      case StructureType::eExportFenceWin32HandleInfoKHR : return "ExportFenceWin32HandleInfoKHR";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
      case StructureType::eFenceGetWin32HandleInfoKHR : return "FenceGetWin32HandleInfoKHR";
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      case StructureType::eImportFenceFdInfoKHR : return "ImportFenceFdInfoKHR";
      case StructureType::eFenceGetFdInfoKHR : return "FenceGetFdInfoKHR";
      case StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR : return "PhysicalDevicePerformanceQueryFeaturesKHR";
      case StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR : return "PhysicalDevicePerformanceQueryPropertiesKHR";
      case StructureType::eQueryPoolPerformanceCreateInfoKHR : return "QueryPoolPerformanceCreateInfoKHR";
      case StructureType::ePerformanceQuerySubmitInfoKHR : return "PerformanceQuerySubmitInfoKHR";
      case StructureType::eAcquireProfilingLockInfoKHR : return "AcquireProfilingLockInfoKHR";
      case StructureType::ePerformanceCounterKHR : return "PerformanceCounterKHR";
      case StructureType::ePerformanceCounterDescriptionKHR : return "PerformanceCounterDescriptionKHR";
      case StructureType::ePhysicalDeviceSurfaceInfo2KHR : return "PhysicalDeviceSurfaceInfo2KHR";
      case StructureType::eSurfaceCapabilities2KHR : return "SurfaceCapabilities2KHR";
      case StructureType::eSurfaceFormat2KHR : return "SurfaceFormat2KHR";
      case StructureType::eDisplayProperties2KHR : return "DisplayProperties2KHR";
      case StructureType::eDisplayPlaneProperties2KHR : return "DisplayPlaneProperties2KHR";
      case StructureType::eDisplayModeProperties2KHR : return "DisplayModeProperties2KHR";
      case StructureType::eDisplayPlaneInfo2KHR : return "DisplayPlaneInfo2KHR";
      case StructureType::eDisplayPlaneCapabilities2KHR : return "DisplayPlaneCapabilities2KHR";
#ifdef VK_USE_PLATFORM_IOS_MVK
      case StructureType::eIosSurfaceCreateInfoMVK : return "IosSurfaceCreateInfoMVK";
#endif /*VK_USE_PLATFORM_IOS_MVK*/
#ifdef VK_USE_PLATFORM_MACOS_MVK
      case StructureType::eMacosSurfaceCreateInfoMVK : return "MacosSurfaceCreateInfoMVK";
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
      case StructureType::eDebugUtilsObjectNameInfoEXT : return "DebugUtilsObjectNameInfoEXT";
      case StructureType::eDebugUtilsObjectTagInfoEXT : return "DebugUtilsObjectTagInfoEXT";
      case StructureType::eDebugUtilsLabelEXT : return "DebugUtilsLabelEXT";
      case StructureType::eDebugUtilsMessengerCallbackDataEXT : return "DebugUtilsMessengerCallbackDataEXT";
      case StructureType::eDebugUtilsMessengerCreateInfoEXT : return "DebugUtilsMessengerCreateInfoEXT";
#ifdef VK_USE_PLATFORM_ANDROID_KHR
      case StructureType::eAndroidHardwareBufferUsageANDROID : return "AndroidHardwareBufferUsageANDROID";
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
      case StructureType::eAndroidHardwareBufferPropertiesANDROID : return "AndroidHardwareBufferPropertiesANDROID";
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
      case StructureType::eAndroidHardwareBufferFormatPropertiesANDROID : return "AndroidHardwareBufferFormatPropertiesANDROID";
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
      case StructureType::eImportAndroidHardwareBufferInfoANDROID : return "ImportAndroidHardwareBufferInfoANDROID";
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
      case StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID : return "MemoryGetAndroidHardwareBufferInfoANDROID";
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
      case StructureType::eExternalFormatANDROID : return "ExternalFormatANDROID";
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
      case StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT : return "PhysicalDeviceInlineUniformBlockFeaturesEXT";
      case StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT : return "PhysicalDeviceInlineUniformBlockPropertiesEXT";
      case StructureType::eWriteDescriptorSetInlineUniformBlockEXT : return "WriteDescriptorSetInlineUniformBlockEXT";
      case StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT : return "DescriptorPoolInlineUniformBlockCreateInfoEXT";
      case StructureType::eSampleLocationsInfoEXT : return "SampleLocationsInfoEXT";
      case StructureType::eRenderPassSampleLocationsBeginInfoEXT : return "RenderPassSampleLocationsBeginInfoEXT";
      case StructureType::ePipelineSampleLocationsStateCreateInfoEXT : return "PipelineSampleLocationsStateCreateInfoEXT";
      case StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT : return "PhysicalDeviceSampleLocationsPropertiesEXT";
      case StructureType::eMultisamplePropertiesEXT : return "MultisamplePropertiesEXT";
      case StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT : return "PhysicalDeviceBlendOperationAdvancedFeaturesEXT";
      case StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT : return "PhysicalDeviceBlendOperationAdvancedPropertiesEXT";
      case StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT : return "PipelineColorBlendAdvancedStateCreateInfoEXT";
      case StructureType::ePipelineCoverageToColorStateCreateInfoNV : return "PipelineCoverageToColorStateCreateInfoNV";
      case StructureType::eWriteDescriptorSetAccelerationStructureKHR : return "WriteDescriptorSetAccelerationStructureKHR";
      case StructureType::eAccelerationStructureBuildGeometryInfoKHR : return "AccelerationStructureBuildGeometryInfoKHR";
      case StructureType::eAccelerationStructureDeviceAddressInfoKHR : return "AccelerationStructureDeviceAddressInfoKHR";
      case StructureType::eAccelerationStructureGeometryAabbsDataKHR : return "AccelerationStructureGeometryAabbsDataKHR";
      case StructureType::eAccelerationStructureGeometryInstancesDataKHR : return "AccelerationStructureGeometryInstancesDataKHR";
      case StructureType::eAccelerationStructureGeometryTrianglesDataKHR : return "AccelerationStructureGeometryTrianglesDataKHR";
      case StructureType::eAccelerationStructureGeometryKHR : return "AccelerationStructureGeometryKHR";
      case StructureType::eAccelerationStructureVersionInfoKHR : return "AccelerationStructureVersionInfoKHR";
      case StructureType::eCopyAccelerationStructureInfoKHR : return "CopyAccelerationStructureInfoKHR";
      case StructureType::eCopyAccelerationStructureToMemoryInfoKHR : return "CopyAccelerationStructureToMemoryInfoKHR";
      case StructureType::eCopyMemoryToAccelerationStructureInfoKHR : return "CopyMemoryToAccelerationStructureInfoKHR";
      case StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR : return "PhysicalDeviceAccelerationStructureFeaturesKHR";
      case StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR : return "PhysicalDeviceAccelerationStructurePropertiesKHR";
      case StructureType::eAccelerationStructureCreateInfoKHR : return "AccelerationStructureCreateInfoKHR";
      case StructureType::eAccelerationStructureBuildSizesInfoKHR : return "AccelerationStructureBuildSizesInfoKHR";
      case StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR : return "PhysicalDeviceRayTracingPipelineFeaturesKHR";
      case StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR : return "PhysicalDeviceRayTracingPipelinePropertiesKHR";
      case StructureType::eRayTracingPipelineCreateInfoKHR : return "RayTracingPipelineCreateInfoKHR";
      case StructureType::eRayTracingShaderGroupCreateInfoKHR : return "RayTracingShaderGroupCreateInfoKHR";
      case StructureType::eRayTracingPipelineInterfaceCreateInfoKHR : return "RayTracingPipelineInterfaceCreateInfoKHR";
      case StructureType::ePhysicalDeviceRayQueryFeaturesKHR : return "PhysicalDeviceRayQueryFeaturesKHR";
      case StructureType::ePipelineCoverageModulationStateCreateInfoNV : return "PipelineCoverageModulationStateCreateInfoNV";
      case StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV : return "PhysicalDeviceShaderSmBuiltinsFeaturesNV";
      case StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV : return "PhysicalDeviceShaderSmBuiltinsPropertiesNV";
      case StructureType::eDrmFormatModifierPropertiesListEXT : return "DrmFormatModifierPropertiesListEXT";
      case StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT : return "PhysicalDeviceImageDrmFormatModifierInfoEXT";
      case StructureType::eImageDrmFormatModifierListCreateInfoEXT : return "ImageDrmFormatModifierListCreateInfoEXT";
      case StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT : return "ImageDrmFormatModifierExplicitCreateInfoEXT";
      case StructureType::eImageDrmFormatModifierPropertiesEXT : return "ImageDrmFormatModifierPropertiesEXT";
      case StructureType::eValidationCacheCreateInfoEXT : return "ValidationCacheCreateInfoEXT";
      case StructureType::eShaderModuleValidationCacheCreateInfoEXT : return "ShaderModuleValidationCacheCreateInfoEXT";
#ifdef VK_ENABLE_BETA_EXTENSIONS
      case StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR : return "PhysicalDevicePortabilitySubsetFeaturesKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
#ifdef VK_ENABLE_BETA_EXTENSIONS
      case StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR : return "PhysicalDevicePortabilitySubsetPropertiesKHR";
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
      case StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV : return "PipelineViewportShadingRateImageStateCreateInfoNV";
      case StructureType::ePhysicalDeviceShadingRateImageFeaturesNV : return "PhysicalDeviceShadingRateImageFeaturesNV";
      case StructureType::ePhysicalDeviceShadingRateImagePropertiesNV : return "PhysicalDeviceShadingRateImagePropertiesNV";
      case StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV : return "PipelineViewportCoarseSampleOrderStateCreateInfoNV";
      case StructureType::eRayTracingPipelineCreateInfoNV : return "RayTracingPipelineCreateInfoNV";
      case StructureType::eAccelerationStructureCreateInfoNV : return "AccelerationStructureCreateInfoNV";
      case StructureType::eGeometryNV : return "GeometryNV";
      case StructureType::eGeometryTrianglesNV : return "GeometryTrianglesNV";
      case StructureType::eGeometryAabbNV : return "GeometryAabbNV";
      case StructureType::eBindAccelerationStructureMemoryInfoNV : return "BindAccelerationStructureMemoryInfoNV";
      case StructureType::eWriteDescriptorSetAccelerationStructureNV : return "WriteDescriptorSetAccelerationStructureNV";
      case StructureType::eAccelerationStructureMemoryRequirementsInfoNV : return "AccelerationStructureMemoryRequirementsInfoNV";
      case StructureType::ePhysicalDeviceRayTracingPropertiesNV : return "PhysicalDeviceRayTracingPropertiesNV";
      case StructureType::eRayTracingShaderGroupCreateInfoNV : return "RayTracingShaderGroupCreateInfoNV";
      case StructureType::eAccelerationStructureInfoNV : return "AccelerationStructureInfoNV";
      case StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV : return "PhysicalDeviceRepresentativeFragmentTestFeaturesNV";
      case StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV : return "PipelineRepresentativeFragmentTestStateCreateInfoNV";
      case StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT : return "PhysicalDeviceImageViewImageFormatInfoEXT";
      case StructureType::eFilterCubicImageViewImageFormatPropertiesEXT : return "FilterCubicImageViewImageFormatPropertiesEXT";
      case StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT : return "DeviceQueueGlobalPriorityCreateInfoEXT";
      case StructureType::eImportMemoryHostPointerInfoEXT : return "ImportMemoryHostPointerInfoEXT";
      case StructureType::eMemoryHostPointerPropertiesEXT : return "MemoryHostPointerPropertiesEXT";
      case StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT : return "PhysicalDeviceExternalMemoryHostPropertiesEXT";
      case StructureType::ePhysicalDeviceShaderClockFeaturesKHR : return "PhysicalDeviceShaderClockFeaturesKHR";
      case StructureType::ePipelineCompilerControlCreateInfoAMD : return "PipelineCompilerControlCreateInfoAMD";
      case StructureType::eCalibratedTimestampInfoEXT : return "CalibratedTimestampInfoEXT";
      case StructureType::ePhysicalDeviceShaderCorePropertiesAMD : return "PhysicalDeviceShaderCorePropertiesAMD";
      case StructureType::eDeviceMemoryOverallocationCreateInfoAMD : return "DeviceMemoryOverallocationCreateInfoAMD";
      case StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT : return "PhysicalDeviceVertexAttributeDivisorPropertiesEXT";
      case StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT : return "PipelineVertexInputDivisorStateCreateInfoEXT";
      case StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT : return "PhysicalDeviceVertexAttributeDivisorFeaturesEXT";
#ifdef VK_USE_PLATFORM_GGP
      case StructureType::ePresentFrameTokenGGP : return "PresentFrameTokenGGP";
#endif /*VK_USE_PLATFORM_GGP*/
      case StructureType::ePipelineCreationFeedbackCreateInfoEXT : return "PipelineCreationFeedbackCreateInfoEXT";
      case StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV : return "PhysicalDeviceComputeShaderDerivativesFeaturesNV";
      case StructureType::ePhysicalDeviceMeshShaderFeaturesNV : return "PhysicalDeviceMeshShaderFeaturesNV";
      case StructureType::ePhysicalDeviceMeshShaderPropertiesNV : return "PhysicalDeviceMeshShaderPropertiesNV";
      case StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV : return "PhysicalDeviceFragmentShaderBarycentricFeaturesNV";
      case StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV : return "PhysicalDeviceShaderImageFootprintFeaturesNV";
      case StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV : return "PipelineViewportExclusiveScissorStateCreateInfoNV";
      case StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV : return "PhysicalDeviceExclusiveScissorFeaturesNV";
      case StructureType::eCheckpointDataNV : return "CheckpointDataNV";
      case StructureType::eQueueFamilyCheckpointPropertiesNV : return "QueueFamilyCheckpointPropertiesNV";
      case StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL : return "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL";
      case StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL : return "QueryPoolPerformanceQueryCreateInfoINTEL";
      case StructureType::eInitializePerformanceApiInfoINTEL : return "InitializePerformanceApiInfoINTEL";
      case StructureType::ePerformanceMarkerInfoINTEL : return "PerformanceMarkerInfoINTEL";
      case StructureType::ePerformanceStreamMarkerInfoINTEL : return "PerformanceStreamMarkerInfoINTEL";
      case StructureType::ePerformanceOverrideInfoINTEL : return "PerformanceOverrideInfoINTEL";
  8202. case StructureType::ePerformanceConfigurationAcquireInfoINTEL : return "PerformanceConfigurationAcquireInfoINTEL";
  8203. case StructureType::ePhysicalDevicePciBusInfoPropertiesEXT : return "PhysicalDevicePciBusInfoPropertiesEXT";
  8204. case StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD : return "DisplayNativeHdrSurfaceCapabilitiesAMD";
  8205. case StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD : return "SwapchainDisplayNativeHdrCreateInfoAMD";
  8206. #ifdef VK_USE_PLATFORM_FUCHSIA
  8207. case StructureType::eImagepipeSurfaceCreateInfoFUCHSIA : return "ImagepipeSurfaceCreateInfoFUCHSIA";
  8208. #endif /*VK_USE_PLATFORM_FUCHSIA*/
  8209. case StructureType::ePhysicalDeviceShaderTerminateInvocationFeaturesKHR : return "PhysicalDeviceShaderTerminateInvocationFeaturesKHR";
  8210. #ifdef VK_USE_PLATFORM_METAL_EXT
  8211. case StructureType::eMetalSurfaceCreateInfoEXT : return "MetalSurfaceCreateInfoEXT";
  8212. #endif /*VK_USE_PLATFORM_METAL_EXT*/
  8213. case StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT : return "PhysicalDeviceFragmentDensityMapFeaturesEXT";
  8214. case StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT : return "PhysicalDeviceFragmentDensityMapPropertiesEXT";
  8215. case StructureType::eRenderPassFragmentDensityMapCreateInfoEXT : return "RenderPassFragmentDensityMapCreateInfoEXT";
  8216. case StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT : return "PhysicalDeviceSubgroupSizeControlPropertiesEXT";
  8217. case StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT : return "PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT";
  8218. case StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT : return "PhysicalDeviceSubgroupSizeControlFeaturesEXT";
  8219. case StructureType::eFragmentShadingRateAttachmentInfoKHR : return "FragmentShadingRateAttachmentInfoKHR";
  8220. case StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR : return "PipelineFragmentShadingRateStateCreateInfoKHR";
  8221. case StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR : return "PhysicalDeviceFragmentShadingRatePropertiesKHR";
  8222. case StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR : return "PhysicalDeviceFragmentShadingRateFeaturesKHR";
  8223. case StructureType::ePhysicalDeviceFragmentShadingRateKHR : return "PhysicalDeviceFragmentShadingRateKHR";
  8224. case StructureType::ePhysicalDeviceShaderCoreProperties2AMD : return "PhysicalDeviceShaderCoreProperties2AMD";
  8225. case StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD : return "PhysicalDeviceCoherentMemoryFeaturesAMD";
  8226. case StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT : return "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT";
  8227. case StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT : return "PhysicalDeviceMemoryBudgetPropertiesEXT";
  8228. case StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT : return "PhysicalDeviceMemoryPriorityFeaturesEXT";
  8229. case StructureType::eMemoryPriorityAllocateInfoEXT : return "MemoryPriorityAllocateInfoEXT";
  8230. case StructureType::eSurfaceProtectedCapabilitiesKHR : return "SurfaceProtectedCapabilitiesKHR";
  8231. case StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV : return "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV";
  8232. case StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT : return "PhysicalDeviceBufferDeviceAddressFeaturesEXT";
  8233. case StructureType::eBufferDeviceAddressCreateInfoEXT : return "BufferDeviceAddressCreateInfoEXT";
  8234. case StructureType::ePhysicalDeviceToolPropertiesEXT : return "PhysicalDeviceToolPropertiesEXT";
  8235. case StructureType::eValidationFeaturesEXT : return "ValidationFeaturesEXT";
  8236. case StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV : return "PhysicalDeviceCooperativeMatrixFeaturesNV";
  8237. case StructureType::eCooperativeMatrixPropertiesNV : return "CooperativeMatrixPropertiesNV";
  8238. case StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV : return "PhysicalDeviceCooperativeMatrixPropertiesNV";
  8239. case StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV : return "PhysicalDeviceCoverageReductionModeFeaturesNV";
  8240. case StructureType::ePipelineCoverageReductionStateCreateInfoNV : return "PipelineCoverageReductionStateCreateInfoNV";
  8241. case StructureType::eFramebufferMixedSamplesCombinationNV : return "FramebufferMixedSamplesCombinationNV";
  8242. case StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT : return "PhysicalDeviceFragmentShaderInterlockFeaturesEXT";
  8243. case StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT : return "PhysicalDeviceYcbcrImageArraysFeaturesEXT";
  8244. #ifdef VK_USE_PLATFORM_WIN32_KHR
  8245. case StructureType::eSurfaceFullScreenExclusiveInfoEXT : return "SurfaceFullScreenExclusiveInfoEXT";
  8246. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  8247. #ifdef VK_USE_PLATFORM_WIN32_KHR
  8248. case StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT : return "SurfaceCapabilitiesFullScreenExclusiveEXT";
  8249. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  8250. #ifdef VK_USE_PLATFORM_WIN32_KHR
  8251. case StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT : return "SurfaceFullScreenExclusiveWin32InfoEXT";
  8252. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  8253. case StructureType::eHeadlessSurfaceCreateInfoEXT : return "HeadlessSurfaceCreateInfoEXT";
  8254. case StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT : return "PhysicalDeviceLineRasterizationFeaturesEXT";
  8255. case StructureType::ePipelineRasterizationLineStateCreateInfoEXT : return "PipelineRasterizationLineStateCreateInfoEXT";
  8256. case StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT : return "PhysicalDeviceLineRasterizationPropertiesEXT";
  8257. case StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT : return "PhysicalDeviceShaderAtomicFloatFeaturesEXT";
  8258. case StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT : return "PhysicalDeviceIndexTypeUint8FeaturesEXT";
  8259. case StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT : return "PhysicalDeviceExtendedDynamicStateFeaturesEXT";
  8260. case StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR : return "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR";
  8261. case StructureType::ePipelineInfoKHR : return "PipelineInfoKHR";
  8262. case StructureType::ePipelineExecutablePropertiesKHR : return "PipelineExecutablePropertiesKHR";
  8263. case StructureType::ePipelineExecutableInfoKHR : return "PipelineExecutableInfoKHR";
  8264. case StructureType::ePipelineExecutableStatisticKHR : return "PipelineExecutableStatisticKHR";
  8265. case StructureType::ePipelineExecutableInternalRepresentationKHR : return "PipelineExecutableInternalRepresentationKHR";
  8266. case StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT : return "PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT";
  8267. case StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV : return "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV";
  8268. case StructureType::eGraphicsShaderGroupCreateInfoNV : return "GraphicsShaderGroupCreateInfoNV";
  8269. case StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV : return "GraphicsPipelineShaderGroupsCreateInfoNV";
  8270. case StructureType::eIndirectCommandsLayoutTokenNV : return "IndirectCommandsLayoutTokenNV";
  8271. case StructureType::eIndirectCommandsLayoutCreateInfoNV : return "IndirectCommandsLayoutCreateInfoNV";
  8272. case StructureType::eGeneratedCommandsInfoNV : return "GeneratedCommandsInfoNV";
  8273. case StructureType::eGeneratedCommandsMemoryRequirementsInfoNV : return "GeneratedCommandsMemoryRequirementsInfoNV";
  8274. case StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV : return "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV";
  8275. case StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT : return "PhysicalDeviceTexelBufferAlignmentFeaturesEXT";
  8276. case StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT : return "PhysicalDeviceTexelBufferAlignmentPropertiesEXT";
  8277. case StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM : return "CommandBufferInheritanceRenderPassTransformInfoQCOM";
  8278. case StructureType::eRenderPassTransformBeginInfoQCOM : return "RenderPassTransformBeginInfoQCOM";
  8279. case StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT : return "PhysicalDeviceDeviceMemoryReportFeaturesEXT";
  8280. case StructureType::eDeviceDeviceMemoryReportCreateInfoEXT : return "DeviceDeviceMemoryReportCreateInfoEXT";
  8281. case StructureType::eDeviceMemoryReportCallbackDataEXT : return "DeviceMemoryReportCallbackDataEXT";
  8282. case StructureType::ePhysicalDeviceRobustness2FeaturesEXT : return "PhysicalDeviceRobustness2FeaturesEXT";
  8283. case StructureType::ePhysicalDeviceRobustness2PropertiesEXT : return "PhysicalDeviceRobustness2PropertiesEXT";
  8284. case StructureType::eSamplerCustomBorderColorCreateInfoEXT : return "SamplerCustomBorderColorCreateInfoEXT";
  8285. case StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT : return "PhysicalDeviceCustomBorderColorPropertiesEXT";
  8286. case StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT : return "PhysicalDeviceCustomBorderColorFeaturesEXT";
  8287. case StructureType::ePipelineLibraryCreateInfoKHR : return "PipelineLibraryCreateInfoKHR";
  8288. case StructureType::ePhysicalDevicePrivateDataFeaturesEXT : return "PhysicalDevicePrivateDataFeaturesEXT";
  8289. case StructureType::eDevicePrivateDataCreateInfoEXT : return "DevicePrivateDataCreateInfoEXT";
  8290. case StructureType::ePrivateDataSlotCreateInfoEXT : return "PrivateDataSlotCreateInfoEXT";
  8291. case StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT : return "PhysicalDevicePipelineCreationCacheControlFeaturesEXT";
  8292. case StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV : return "PhysicalDeviceDiagnosticsConfigFeaturesNV";
  8293. case StructureType::eDeviceDiagnosticsConfigCreateInfoNV : return "DeviceDiagnosticsConfigCreateInfoNV";
  8294. case StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR : return "PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR";
  8295. case StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV : return "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV";
  8296. case StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV : return "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV";
  8297. case StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV : return "PipelineFragmentShadingRateEnumStateCreateInfoNV";
  8298. case StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT : return "PhysicalDeviceFragmentDensityMap2FeaturesEXT";
  8299. case StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT : return "PhysicalDeviceFragmentDensityMap2PropertiesEXT";
  8300. case StructureType::eCopyCommandTransformInfoQCOM : return "CopyCommandTransformInfoQCOM";
  8301. case StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT : return "PhysicalDeviceImageRobustnessFeaturesEXT";
  8302. case StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR : return "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR";
  8303. case StructureType::eCopyBufferInfo2KHR : return "CopyBufferInfo2KHR";
  8304. case StructureType::eCopyImageInfo2KHR : return "CopyImageInfo2KHR";
  8305. case StructureType::eCopyBufferToImageInfo2KHR : return "CopyBufferToImageInfo2KHR";
  8306. case StructureType::eCopyImageToBufferInfo2KHR : return "CopyImageToBufferInfo2KHR";
  8307. case StructureType::eBlitImageInfo2KHR : return "BlitImageInfo2KHR";
  8308. case StructureType::eResolveImageInfo2KHR : return "ResolveImageInfo2KHR";
  8309. case StructureType::eBufferCopy2KHR : return "BufferCopy2KHR";
  8310. case StructureType::eImageCopy2KHR : return "ImageCopy2KHR";
  8311. case StructureType::eImageBlit2KHR : return "ImageBlit2KHR";
  8312. case StructureType::eBufferImageCopy2KHR : return "BufferImageCopy2KHR";
  8313. case StructureType::eImageResolve2KHR : return "ImageResolve2KHR";
  8314. case StructureType::ePhysicalDevice4444FormatsFeaturesEXT : return "PhysicalDevice4444FormatsFeaturesEXT";
  8315. #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
  8316. case StructureType::eDirectfbSurfaceCreateInfoEXT : return "DirectfbSurfaceCreateInfoEXT";
  8317. #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
  8318. case StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE : return "PhysicalDeviceMutableDescriptorTypeFeaturesVALVE";
  8319. case StructureType::eMutableDescriptorTypeCreateInfoVALVE : return "MutableDescriptorTypeCreateInfoVALVE";
  8320. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  8321. }
  8322. }
  8323. enum class SubgroupFeatureFlagBits : VkSubgroupFeatureFlags
  8324. {
  8325. eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT,
  8326. eVote = VK_SUBGROUP_FEATURE_VOTE_BIT,
  8327. eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT,
  8328. eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT,
  8329. eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT,
  8330. eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT,
  8331. eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
  8332. eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT,
  8333. ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV};
  8334. VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlagBits value )
  8335. {
  8336. switch ( value )
  8337. {
  8338. case SubgroupFeatureFlagBits::eBasic : return "Basic";
  8339. case SubgroupFeatureFlagBits::eVote : return "Vote";
  8340. case SubgroupFeatureFlagBits::eArithmetic : return "Arithmetic";
  8341. case SubgroupFeatureFlagBits::eBallot : return "Ballot";
  8342. case SubgroupFeatureFlagBits::eShuffle : return "Shuffle";
  8343. case SubgroupFeatureFlagBits::eShuffleRelative : return "ShuffleRelative";
  8344. case SubgroupFeatureFlagBits::eClustered : return "Clustered";
  8345. case SubgroupFeatureFlagBits::eQuad : return "Quad";
  8346. case SubgroupFeatureFlagBits::ePartitionedNV : return "PartitionedNV";
  8347. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  8348. }
  8349. }
  8350. enum class SubpassContents
  8351. {
  8352. eInline = VK_SUBPASS_CONTENTS_INLINE,
  8353. eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS};
  8354. VULKAN_HPP_INLINE std::string to_string( SubpassContents value )
  8355. {
  8356. switch ( value )
  8357. {
  8358. case SubpassContents::eInline : return "Inline";
  8359. case SubpassContents::eSecondaryCommandBuffers : return "SecondaryCommandBuffers";
  8360. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  8361. }
  8362. }
  8363. enum class SubpassDescriptionFlagBits : VkSubpassDescriptionFlags
  8364. {
  8365. ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX,
  8366. ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX,
  8367. eFragmentRegionQCOM = VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM,
  8368. eShaderResolveQCOM = VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM};
  8369. VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlagBits value )
  8370. {
  8371. switch ( value )
  8372. {
  8373. case SubpassDescriptionFlagBits::ePerViewAttributesNVX : return "PerViewAttributesNVX";
  8374. case SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX : return "PerViewPositionXOnlyNVX";
  8375. case SubpassDescriptionFlagBits::eFragmentRegionQCOM : return "FragmentRegionQCOM";
  8376. case SubpassDescriptionFlagBits::eShaderResolveQCOM : return "ShaderResolveQCOM";
  8377. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  8378. }
  8379. }
  8380. enum class SurfaceCounterFlagBitsEXT : VkSurfaceCounterFlagsEXT
  8381. {
  8382. eVblank = VK_SURFACE_COUNTER_VBLANK_BIT_EXT};
  8383. VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagBitsEXT value )
  8384. {
  8385. switch ( value )
  8386. {
  8387. case SurfaceCounterFlagBitsEXT::eVblank : return "Vblank";
  8388. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  8389. }
  8390. }
  8391. enum class SurfaceTransformFlagBitsKHR : VkSurfaceTransformFlagsKHR
  8392. {
  8393. eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
  8394. eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
  8395. eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
  8396. eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
  8397. eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
  8398. eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
  8399. eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
  8400. eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
  8401. eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR};
  8402. VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagBitsKHR value )
  8403. {
  8404. switch ( value )
  8405. {
  8406. case SurfaceTransformFlagBitsKHR::eIdentity : return "Identity";
  8407. case SurfaceTransformFlagBitsKHR::eRotate90 : return "Rotate90";
  8408. case SurfaceTransformFlagBitsKHR::eRotate180 : return "Rotate180";
  8409. case SurfaceTransformFlagBitsKHR::eRotate270 : return "Rotate270";
  8410. case SurfaceTransformFlagBitsKHR::eHorizontalMirror : return "HorizontalMirror";
  8411. case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 : return "HorizontalMirrorRotate90";
  8412. case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 : return "HorizontalMirrorRotate180";
  8413. case SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 : return "HorizontalMirrorRotate270";
  8414. case SurfaceTransformFlagBitsKHR::eInherit : return "Inherit";
  8415. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  8416. }
  8417. }
  8418. enum class SwapchainCreateFlagBitsKHR : VkSwapchainCreateFlagsKHR
  8419. {
  8420. eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
  8421. eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR,
  8422. eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR};
  8423. VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagBitsKHR value )
  8424. {
  8425. switch ( value )
  8426. {
  8427. case SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions : return "SplitInstanceBindRegions";
  8428. case SwapchainCreateFlagBitsKHR::eProtected : return "Protected";
  8429. case SwapchainCreateFlagBitsKHR::eMutableFormat : return "MutableFormat";
  8430. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  8431. }
  8432. }
  8433. enum class SystemAllocationScope
  8434. {
  8435. eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND,
  8436. eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT,
  8437. eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE,
  8438. eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE,
  8439. eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE};
  8440. VULKAN_HPP_INLINE std::string to_string( SystemAllocationScope value )
  8441. {
  8442. switch ( value )
  8443. {
  8444. case SystemAllocationScope::eCommand : return "Command";
  8445. case SystemAllocationScope::eObject : return "Object";
  8446. case SystemAllocationScope::eCache : return "Cache";
  8447. case SystemAllocationScope::eDevice : return "Device";
  8448. case SystemAllocationScope::eInstance : return "Instance";
  8449. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  8450. }
  8451. }
  8452. enum class TessellationDomainOrigin
  8453. {
  8454. eUpperLeft = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
  8455. eLowerLeft = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT};
  8456. using TessellationDomainOriginKHR = TessellationDomainOrigin;
  8457. VULKAN_HPP_INLINE std::string to_string( TessellationDomainOrigin value )
  8458. {
  8459. switch ( value )
  8460. {
  8461. case TessellationDomainOrigin::eUpperLeft : return "UpperLeft";
  8462. case TessellationDomainOrigin::eLowerLeft : return "LowerLeft";
  8463. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  8464. }
  8465. }
  8466. enum class TimeDomainEXT
  8467. {
  8468. eDevice = VK_TIME_DOMAIN_DEVICE_EXT,
  8469. eClockMonotonic = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT,
  8470. eClockMonotonicRaw = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT,
  8471. eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT};
  8472. VULKAN_HPP_INLINE std::string to_string( TimeDomainEXT value )
  8473. {
  8474. switch ( value )
  8475. {
  8476. case TimeDomainEXT::eDevice : return "Device";
  8477. case TimeDomainEXT::eClockMonotonic : return "ClockMonotonic";
  8478. case TimeDomainEXT::eClockMonotonicRaw : return "ClockMonotonicRaw";
  8479. case TimeDomainEXT::eQueryPerformanceCounter : return "QueryPerformanceCounter";
  8480. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  8481. }
  8482. }
  8483. enum class ToolPurposeFlagBitsEXT : VkToolPurposeFlagsEXT
  8484. {
  8485. eValidation = VK_TOOL_PURPOSE_VALIDATION_BIT_EXT,
  8486. eProfiling = VK_TOOL_PURPOSE_PROFILING_BIT_EXT,
  8487. eTracing = VK_TOOL_PURPOSE_TRACING_BIT_EXT,
  8488. eAdditionalFeatures = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT,
  8489. eModifyingFeatures = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT,
  8490. eDebugReporting = VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT,
  8491. eDebugMarkers = VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT};
  8492. VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagBitsEXT value )
  8493. {
  8494. switch ( value )
  8495. {
  8496. case ToolPurposeFlagBitsEXT::eValidation : return "Validation";
  8497. case ToolPurposeFlagBitsEXT::eProfiling : return "Profiling";
  8498. case ToolPurposeFlagBitsEXT::eTracing : return "Tracing";
  8499. case ToolPurposeFlagBitsEXT::eAdditionalFeatures : return "AdditionalFeatures";
  8500. case ToolPurposeFlagBitsEXT::eModifyingFeatures : return "ModifyingFeatures";
  8501. case ToolPurposeFlagBitsEXT::eDebugReporting : return "DebugReporting";
  8502. case ToolPurposeFlagBitsEXT::eDebugMarkers : return "DebugMarkers";
  8503. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  8504. }
  8505. }
  8506. enum class ValidationCacheHeaderVersionEXT
  8507. {
  8508. eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT};
  8509. VULKAN_HPP_INLINE std::string to_string( ValidationCacheHeaderVersionEXT value )
  8510. {
  8511. switch ( value )
  8512. {
  8513. case ValidationCacheHeaderVersionEXT::eOne : return "One";
  8514. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  8515. }
  8516. }
  8517. enum class ValidationCheckEXT
  8518. {
  8519. eAll = VK_VALIDATION_CHECK_ALL_EXT,
  8520. eShaders = VK_VALIDATION_CHECK_SHADERS_EXT};
  8521. VULKAN_HPP_INLINE std::string to_string( ValidationCheckEXT value )
  8522. {
  8523. switch ( value )
  8524. {
  8525. case ValidationCheckEXT::eAll : return "All";
  8526. case ValidationCheckEXT::eShaders : return "Shaders";
  8527. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  8528. }
  8529. }
  8530. enum class ValidationFeatureDisableEXT
  8531. {
  8532. eAll = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT,
  8533. eShaders = VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT,
  8534. eThreadSafety = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT,
  8535. eApiParameters = VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT,
  8536. eObjectLifetimes = VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT,
  8537. eCoreChecks = VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT,
  8538. eUniqueHandles = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT};
  8539. VULKAN_HPP_INLINE std::string to_string( ValidationFeatureDisableEXT value )
  8540. {
  8541. switch ( value )
  8542. {
  8543. case ValidationFeatureDisableEXT::eAll : return "All";
  8544. case ValidationFeatureDisableEXT::eShaders : return "Shaders";
  8545. case ValidationFeatureDisableEXT::eThreadSafety : return "ThreadSafety";
  8546. case ValidationFeatureDisableEXT::eApiParameters : return "ApiParameters";
  8547. case ValidationFeatureDisableEXT::eObjectLifetimes : return "ObjectLifetimes";
  8548. case ValidationFeatureDisableEXT::eCoreChecks : return "CoreChecks";
  8549. case ValidationFeatureDisableEXT::eUniqueHandles : return "UniqueHandles";
  8550. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  8551. }
  8552. }
  8553. enum class ValidationFeatureEnableEXT
  8554. {
  8555. eGpuAssisted = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT,
  8556. eGpuAssistedReserveBindingSlot = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT,
  8557. eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT,
  8558. eDebugPrintf = VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT,
  8559. eSynchronizationValidation = VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT};
  8560. VULKAN_HPP_INLINE std::string to_string( ValidationFeatureEnableEXT value )
  8561. {
  8562. switch ( value )
  8563. {
  8564. case ValidationFeatureEnableEXT::eGpuAssisted : return "GpuAssisted";
  8565. case ValidationFeatureEnableEXT::eGpuAssistedReserveBindingSlot : return "GpuAssistedReserveBindingSlot";
  8566. case ValidationFeatureEnableEXT::eBestPractices : return "BestPractices";
  8567. case ValidationFeatureEnableEXT::eDebugPrintf : return "DebugPrintf";
  8568. case ValidationFeatureEnableEXT::eSynchronizationValidation : return "SynchronizationValidation";
  8569. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  8570. }
  8571. }
  8572. enum class VendorId
  8573. {
  8574. eVIV = VK_VENDOR_ID_VIV,
  8575. eVSI = VK_VENDOR_ID_VSI,
  8576. eKazan = VK_VENDOR_ID_KAZAN,
  8577. eCodeplay = VK_VENDOR_ID_CODEPLAY,
  8578. eMESA = VK_VENDOR_ID_MESA,
  8579. ePocl = VK_VENDOR_ID_POCL};
  8580. VULKAN_HPP_INLINE std::string to_string( VendorId value )
  8581. {
  8582. switch ( value )
  8583. {
  8584. case VendorId::eVIV : return "VIV";
  8585. case VendorId::eVSI : return "VSI";
  8586. case VendorId::eKazan : return "Kazan";
  8587. case VendorId::eCodeplay : return "Codeplay";
  8588. case VendorId::eMESA : return "MESA";
  8589. case VendorId::ePocl : return "Pocl";
  8590. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  8591. }
  8592. }
  8593. enum class VertexInputRate
  8594. {
  8595. eVertex = VK_VERTEX_INPUT_RATE_VERTEX,
  8596. eInstance = VK_VERTEX_INPUT_RATE_INSTANCE};
  8597. VULKAN_HPP_INLINE std::string to_string( VertexInputRate value )
  8598. {
  8599. switch ( value )
  8600. {
  8601. case VertexInputRate::eVertex : return "Vertex";
  8602. case VertexInputRate::eInstance : return "Instance";
  8603. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  8604. }
  8605. }
  8606. enum class ViewportCoordinateSwizzleNV
  8607. {
  8608. ePositiveX = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV,
  8609. eNegativeX = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV,
  8610. ePositiveY = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV,
  8611. eNegativeY = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV,
  8612. ePositiveZ = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV,
  8613. eNegativeZ = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV,
  8614. ePositiveW = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV,
  8615. eNegativeW = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV};
  8616. VULKAN_HPP_INLINE std::string to_string( ViewportCoordinateSwizzleNV value )
  8617. {
  8618. switch ( value )
  8619. {
  8620. case ViewportCoordinateSwizzleNV::ePositiveX : return "PositiveX";
  8621. case ViewportCoordinateSwizzleNV::eNegativeX : return "NegativeX";
  8622. case ViewportCoordinateSwizzleNV::ePositiveY : return "PositiveY";
  8623. case ViewportCoordinateSwizzleNV::eNegativeY : return "NegativeY";
  8624. case ViewportCoordinateSwizzleNV::ePositiveZ : return "PositiveZ";
  8625. case ViewportCoordinateSwizzleNV::eNegativeZ : return "NegativeZ";
  8626. case ViewportCoordinateSwizzleNV::ePositiveW : return "PositiveW";
  8627. case ViewportCoordinateSwizzleNV::eNegativeW : return "NegativeW";
  8628. default: return "invalid ( " + VULKAN_HPP_NAMESPACE::toHexString( static_cast<uint32_t>( value ) ) + " )";
  8629. }
  8630. }
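// The IndexTypeValue and CppType specializations below map between C++ index types and IndexType
// enumerants at compile time, e.g. IndexTypeValue<uint16_t>::value is IndexType::eUint16 and
// CppType<IndexType, IndexType::eUint16>::Type is uint16_t.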
template<typename T>
struct IndexTypeValue
{};
template <>
struct IndexTypeValue<uint16_t>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint16;
};
template <>
struct CppType<IndexType, IndexType::eUint16>
{
using Type = uint16_t;
};
template <>
struct IndexTypeValue<uint32_t>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint32;
};
template <>
struct CppType<IndexType, IndexType::eUint32>
{
using Type = uint32_t;
};
template <>
struct IndexTypeValue<uint8_t>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint8EXT;
};
template <>
struct CppType<IndexType, IndexType::eUint8EXT>
{
using Type = uint8_t;
};
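// Each *FlagBits enumeration is wrapped in a type-safe Flags<> bitmask below. The FlagTraits
// specialization ORs together every bit defined for the enumeration (allFlags), the constexpr
// operators combine individual bits into a mask, and to_string decomposes a mask into a
// " | "-separated list of bit names.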
using AccelerationStructureCreateFlagsKHR = Flags<AccelerationStructureCreateFlagBitsKHR>;
template <> struct FlagTraits<AccelerationStructureCreateFlagBitsKHR>
{
enum : VkFlags
{
allFlags = VkFlags(AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay)
};
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator|( AccelerationStructureCreateFlagBitsKHR bit0, AccelerationStructureCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
return AccelerationStructureCreateFlagsKHR( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator&( AccelerationStructureCreateFlagBitsKHR bit0, AccelerationStructureCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
return AccelerationStructureCreateFlagsKHR( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator^( AccelerationStructureCreateFlagBitsKHR bit0, AccelerationStructureCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
return AccelerationStructureCreateFlagsKHR( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccelerationStructureCreateFlagsKHR operator~( AccelerationStructureCreateFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
{
return ~( AccelerationStructureCreateFlagsKHR( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( AccelerationStructureCreateFlagsKHR value )
{
if ( !value ) return "{}";
std::string result;
if ( value & AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay ) result += "DeviceAddressCaptureReplay | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
using AccessFlags = Flags<AccessFlagBits>;
template <> struct FlagTraits<AccessFlagBits>
{
enum : VkFlags
{
allFlags = VkFlags(AccessFlagBits::eIndirectCommandRead) |
VkFlags(AccessFlagBits::eIndexRead) |
VkFlags(AccessFlagBits::eVertexAttributeRead) |
VkFlags(AccessFlagBits::eUniformRead) |
VkFlags(AccessFlagBits::eInputAttachmentRead) |
VkFlags(AccessFlagBits::eShaderRead) |
VkFlags(AccessFlagBits::eShaderWrite) |
VkFlags(AccessFlagBits::eColorAttachmentRead) |
VkFlags(AccessFlagBits::eColorAttachmentWrite) |
VkFlags(AccessFlagBits::eDepthStencilAttachmentRead) |
VkFlags(AccessFlagBits::eDepthStencilAttachmentWrite) |
VkFlags(AccessFlagBits::eTransferRead) |
VkFlags(AccessFlagBits::eTransferWrite) |
VkFlags(AccessFlagBits::eHostRead) |
VkFlags(AccessFlagBits::eHostWrite) |
VkFlags(AccessFlagBits::eMemoryRead) |
VkFlags(AccessFlagBits::eMemoryWrite) |
VkFlags(AccessFlagBits::eTransformFeedbackWriteEXT) |
VkFlags(AccessFlagBits::eTransformFeedbackCounterReadEXT) |
VkFlags(AccessFlagBits::eTransformFeedbackCounterWriteEXT) |
VkFlags(AccessFlagBits::eConditionalRenderingReadEXT) |
VkFlags(AccessFlagBits::eColorAttachmentReadNoncoherentEXT) |
VkFlags(AccessFlagBits::eAccelerationStructureReadKHR) |
VkFlags(AccessFlagBits::eAccelerationStructureWriteKHR) |
VkFlags(AccessFlagBits::eShadingRateImageReadNV) |
VkFlags(AccessFlagBits::eFragmentDensityMapReadEXT) |
VkFlags(AccessFlagBits::eCommandPreprocessReadNV) |
VkFlags(AccessFlagBits::eCommandPreprocessWriteNV)
};
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator|( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
return AccessFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator&( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
return AccessFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator^( AccessFlagBits bit0, AccessFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
return AccessFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AccessFlags operator~( AccessFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
return ~( AccessFlags( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( AccessFlags value )
{
if ( !value ) return "{}";
std::string result;
if ( value & AccessFlagBits::eIndirectCommandRead ) result += "IndirectCommandRead | ";
if ( value & AccessFlagBits::eIndexRead ) result += "IndexRead | ";
if ( value & AccessFlagBits::eVertexAttributeRead ) result += "VertexAttributeRead | ";
if ( value & AccessFlagBits::eUniformRead ) result += "UniformRead | ";
if ( value & AccessFlagBits::eInputAttachmentRead ) result += "InputAttachmentRead | ";
if ( value & AccessFlagBits::eShaderRead ) result += "ShaderRead | ";
if ( value & AccessFlagBits::eShaderWrite ) result += "ShaderWrite | ";
if ( value & AccessFlagBits::eColorAttachmentRead ) result += "ColorAttachmentRead | ";
if ( value & AccessFlagBits::eColorAttachmentWrite ) result += "ColorAttachmentWrite | ";
if ( value & AccessFlagBits::eDepthStencilAttachmentRead ) result += "DepthStencilAttachmentRead | ";
if ( value & AccessFlagBits::eDepthStencilAttachmentWrite ) result += "DepthStencilAttachmentWrite | ";
if ( value & AccessFlagBits::eTransferRead ) result += "TransferRead | ";
if ( value & AccessFlagBits::eTransferWrite ) result += "TransferWrite | ";
if ( value & AccessFlagBits::eHostRead ) result += "HostRead | ";
if ( value & AccessFlagBits::eHostWrite ) result += "HostWrite | ";
if ( value & AccessFlagBits::eMemoryRead ) result += "MemoryRead | ";
if ( value & AccessFlagBits::eMemoryWrite ) result += "MemoryWrite | ";
if ( value & AccessFlagBits::eTransformFeedbackWriteEXT ) result += "TransformFeedbackWriteEXT | ";
if ( value & AccessFlagBits::eTransformFeedbackCounterReadEXT ) result += "TransformFeedbackCounterReadEXT | ";
if ( value & AccessFlagBits::eTransformFeedbackCounterWriteEXT ) result += "TransformFeedbackCounterWriteEXT | ";
if ( value & AccessFlagBits::eConditionalRenderingReadEXT ) result += "ConditionalRenderingReadEXT | ";
if ( value & AccessFlagBits::eColorAttachmentReadNoncoherentEXT ) result += "ColorAttachmentReadNoncoherentEXT | ";
if ( value & AccessFlagBits::eAccelerationStructureReadKHR ) result += "AccelerationStructureReadKHR | ";
if ( value & AccessFlagBits::eAccelerationStructureWriteKHR ) result += "AccelerationStructureWriteKHR | ";
if ( value & AccessFlagBits::eShadingRateImageReadNV ) result += "ShadingRateImageReadNV | ";
if ( value & AccessFlagBits::eFragmentDensityMapReadEXT ) result += "FragmentDensityMapReadEXT | ";
if ( value & AccessFlagBits::eCommandPreprocessReadNV ) result += "CommandPreprocessReadNV | ";
if ( value & AccessFlagBits::eCommandPreprocessWriteNV ) result += "CommandPreprocessWriteNV | ";
return "{ " + result.substr(0, result.size() - 3) + " }";
}
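// Illustrative example: to_string( AccessFlagBits::eShaderRead | AccessFlagBits::eShaderWrite )
// returns "{ ShaderRead | ShaderWrite }".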
  8780. using AcquireProfilingLockFlagsKHR = Flags<AcquireProfilingLockFlagBitsKHR>;
  8781. VULKAN_HPP_INLINE std::string to_string( AcquireProfilingLockFlagsKHR )
  8782. {
  8783. return "{}";
  8784. }
  8785. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  8786. enum class AndroidSurfaceCreateFlagBitsKHR : VkFlags
  8787. {};
  8788. VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagBitsKHR )
  8789. {
  8790. return "(void)";
  8791. }
  8792. using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR>;
  8793. VULKAN_HPP_INLINE std::string to_string( AndroidSurfaceCreateFlagsKHR )
  8794. {
  8795. return "{}";
  8796. }
  8797. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  8798. using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits>;
  8799. template <> struct FlagTraits<AttachmentDescriptionFlagBits>
  8800. {
  8801. enum : VkFlags
  8802. {
  8803. allFlags = VkFlags(AttachmentDescriptionFlagBits::eMayAlias)
  8804. };
  8805. };
  8806. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator|( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  8807. {
  8808. return AttachmentDescriptionFlags( bit0 ) | bit1;
  8809. }
  8810. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator&( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  8811. {
  8812. return AttachmentDescriptionFlags( bit0 ) & bit1;
  8813. }
  8814. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator^( AttachmentDescriptionFlagBits bit0, AttachmentDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  8815. {
  8816. return AttachmentDescriptionFlags( bit0 ) ^ bit1;
  8817. }
  8818. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR AttachmentDescriptionFlags operator~( AttachmentDescriptionFlagBits bits ) VULKAN_HPP_NOEXCEPT
  8819. {
  8820. return ~( AttachmentDescriptionFlags( bits ) );
  8821. }
  8822. VULKAN_HPP_INLINE std::string to_string( AttachmentDescriptionFlags value )
  8823. {
  8824. if ( !value ) return "{}";
  8825. std::string result;
  8826. if ( value & AttachmentDescriptionFlagBits::eMayAlias ) result += "MayAlias | ";
  8827. return "{ " + result.substr(0, result.size() - 3) + " }";
  8828. }
  8829. using BufferCreateFlags = Flags<BufferCreateFlagBits>;
  8830. template <> struct FlagTraits<BufferCreateFlagBits>
  8831. {
  8832. enum : VkFlags
  8833. {
  8834. allFlags = VkFlags(BufferCreateFlagBits::eSparseBinding) |
  8835. VkFlags(BufferCreateFlagBits::eSparseResidency) |
  8836. VkFlags(BufferCreateFlagBits::eSparseAliased) |
  8837. VkFlags(BufferCreateFlagBits::eProtected) |
  8838. VkFlags(BufferCreateFlagBits::eDeviceAddressCaptureReplay)
  8839. };
  8840. };
  8841. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator|( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  8842. {
  8843. return BufferCreateFlags( bit0 ) | bit1;
  8844. }
  8845. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator&( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  8846. {
  8847. return BufferCreateFlags( bit0 ) & bit1;
  8848. }
  8849. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator^( BufferCreateFlagBits bit0, BufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  8850. {
  8851. return BufferCreateFlags( bit0 ) ^ bit1;
  8852. }
  8853. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferCreateFlags operator~( BufferCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
  8854. {
  8855. return ~( BufferCreateFlags( bits ) );
  8856. }
  8857. VULKAN_HPP_INLINE std::string to_string( BufferCreateFlags value )
  8858. {
  8859. if ( !value ) return "{}";
  8860. std::string result;
  8861. if ( value & BufferCreateFlagBits::eSparseBinding ) result += "SparseBinding | ";
  8862. if ( value & BufferCreateFlagBits::eSparseResidency ) result += "SparseResidency | ";
  8863. if ( value & BufferCreateFlagBits::eSparseAliased ) result += "SparseAliased | ";
  8864. if ( value & BufferCreateFlagBits::eProtected ) result += "Protected | ";
  8865. if ( value & BufferCreateFlagBits::eDeviceAddressCaptureReplay ) result += "DeviceAddressCaptureReplay | ";
  8866. return "{ " + result.substr(0, result.size() - 3) + " }";
  8867. }
  8868. using BufferUsageFlags = Flags<BufferUsageFlagBits>;
  8869. template <> struct FlagTraits<BufferUsageFlagBits>
  8870. {
  8871. enum : VkFlags
  8872. {
  8873. allFlags = VkFlags(BufferUsageFlagBits::eTransferSrc) |
  8874. VkFlags(BufferUsageFlagBits::eTransferDst) |
  8875. VkFlags(BufferUsageFlagBits::eUniformTexelBuffer) |
  8876. VkFlags(BufferUsageFlagBits::eStorageTexelBuffer) |
  8877. VkFlags(BufferUsageFlagBits::eUniformBuffer) |
  8878. VkFlags(BufferUsageFlagBits::eStorageBuffer) |
  8879. VkFlags(BufferUsageFlagBits::eIndexBuffer) |
  8880. VkFlags(BufferUsageFlagBits::eVertexBuffer) |
  8881. VkFlags(BufferUsageFlagBits::eIndirectBuffer) |
  8882. VkFlags(BufferUsageFlagBits::eShaderDeviceAddress) |
  8883. VkFlags(BufferUsageFlagBits::eTransformFeedbackBufferEXT) |
  8884. VkFlags(BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT) |
  8885. VkFlags(BufferUsageFlagBits::eConditionalRenderingEXT) |
  8886. VkFlags(BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR) |
  8887. VkFlags(BufferUsageFlagBits::eAccelerationStructureStorageKHR) |
  8888. VkFlags(BufferUsageFlagBits::eShaderBindingTableKHR)
  8889. };
  8890. };
  8891. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator|( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  8892. {
  8893. return BufferUsageFlags( bit0 ) | bit1;
  8894. }
  8895. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator&( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  8896. {
  8897. return BufferUsageFlags( bit0 ) & bit1;
  8898. }
  8899. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator^( BufferUsageFlagBits bit0, BufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  8900. {
  8901. return BufferUsageFlags( bit0 ) ^ bit1;
  8902. }
  8903. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BufferUsageFlags operator~( BufferUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT
  8904. {
  8905. return ~( BufferUsageFlags( bits ) );
  8906. }
  8907. VULKAN_HPP_INLINE std::string to_string( BufferUsageFlags value )
  8908. {
  8909. if ( !value ) return "{}";
  8910. std::string result;
  8911. if ( value & BufferUsageFlagBits::eTransferSrc ) result += "TransferSrc | ";
  8912. if ( value & BufferUsageFlagBits::eTransferDst ) result += "TransferDst | ";
  8913. if ( value & BufferUsageFlagBits::eUniformTexelBuffer ) result += "UniformTexelBuffer | ";
  8914. if ( value & BufferUsageFlagBits::eStorageTexelBuffer ) result += "StorageTexelBuffer | ";
  8915. if ( value & BufferUsageFlagBits::eUniformBuffer ) result += "UniformBuffer | ";
  8916. if ( value & BufferUsageFlagBits::eStorageBuffer ) result += "StorageBuffer | ";
  8917. if ( value & BufferUsageFlagBits::eIndexBuffer ) result += "IndexBuffer | ";
  8918. if ( value & BufferUsageFlagBits::eVertexBuffer ) result += "VertexBuffer | ";
  8919. if ( value & BufferUsageFlagBits::eIndirectBuffer ) result += "IndirectBuffer | ";
  8920. if ( value & BufferUsageFlagBits::eShaderDeviceAddress ) result += "ShaderDeviceAddress | ";
  8921. if ( value & BufferUsageFlagBits::eTransformFeedbackBufferEXT ) result += "TransformFeedbackBufferEXT | ";
  8922. if ( value & BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT ) result += "TransformFeedbackCounterBufferEXT | ";
  8923. if ( value & BufferUsageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | ";
  8924. if ( value & BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR ) result += "AccelerationStructureBuildInputReadOnlyKHR | ";
  8925. if ( value & BufferUsageFlagBits::eAccelerationStructureStorageKHR ) result += "AccelerationStructureStorageKHR | ";
  8926. if ( value & BufferUsageFlagBits::eShaderBindingTableKHR ) result += "ShaderBindingTableKHR | ";
  8927. return "{ " + result.substr(0, result.size() - 3) + " }";
  8928. }
  8929. enum class BufferViewCreateFlagBits : VkFlags
  8930. {};
  8931. VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlagBits )
  8932. {
  8933. return "(void)";
  8934. }
  8935. using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits>;
  8936. VULKAN_HPP_INLINE std::string to_string( BufferViewCreateFlags )
  8937. {
  8938. return "{}";
  8939. }
  8940. using BuildAccelerationStructureFlagsKHR = Flags<BuildAccelerationStructureFlagBitsKHR>;
  8941. template <> struct FlagTraits<BuildAccelerationStructureFlagBitsKHR>
  8942. {
  8943. enum : VkFlags
  8944. {
  8945. allFlags = VkFlags(BuildAccelerationStructureFlagBitsKHR::eAllowUpdate) |
  8946. VkFlags(BuildAccelerationStructureFlagBitsKHR::eAllowCompaction) |
  8947. VkFlags(BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace) |
  8948. VkFlags(BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild) |
  8949. VkFlags(BuildAccelerationStructureFlagBitsKHR::eLowMemory)
  8950. };
  8951. };
  8952. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator|( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  8953. {
  8954. return BuildAccelerationStructureFlagsKHR( bit0 ) | bit1;
  8955. }
  8956. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator&( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  8957. {
  8958. return BuildAccelerationStructureFlagsKHR( bit0 ) & bit1;
  8959. }
  8960. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator^( BuildAccelerationStructureFlagBitsKHR bit0, BuildAccelerationStructureFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  8961. {
  8962. return BuildAccelerationStructureFlagsKHR( bit0 ) ^ bit1;
  8963. }
  8964. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR BuildAccelerationStructureFlagsKHR operator~( BuildAccelerationStructureFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
  8965. {
  8966. return ~( BuildAccelerationStructureFlagsKHR( bits ) );
  8967. }
  8968. using BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR;
  8969. VULKAN_HPP_INLINE std::string to_string( BuildAccelerationStructureFlagsKHR value )
  8970. {
  8971. if ( !value ) return "{}";
  8972. std::string result;
  8973. if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowUpdate ) result += "AllowUpdate | ";
  8974. if ( value & BuildAccelerationStructureFlagBitsKHR::eAllowCompaction ) result += "AllowCompaction | ";
  8975. if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace ) result += "PreferFastTrace | ";
  8976. if ( value & BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild ) result += "PreferFastBuild | ";
  8977. if ( value & BuildAccelerationStructureFlagBitsKHR::eLowMemory ) result += "LowMemory | ";
  8978. return "{ " + result.substr(0, result.size() - 3) + " }";
  8979. }
  8980. using ColorComponentFlags = Flags<ColorComponentFlagBits>;
  8981. template <> struct FlagTraits<ColorComponentFlagBits>
  8982. {
  8983. enum : VkFlags
  8984. {
  8985. allFlags = VkFlags(ColorComponentFlagBits::eR) |
  8986. VkFlags(ColorComponentFlagBits::eG) |
  8987. VkFlags(ColorComponentFlagBits::eB) |
  8988. VkFlags(ColorComponentFlagBits::eA)
  8989. };
  8990. };
  8991. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator|( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  8992. {
  8993. return ColorComponentFlags( bit0 ) | bit1;
  8994. }
  8995. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator&( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  8996. {
  8997. return ColorComponentFlags( bit0 ) & bit1;
  8998. }
  8999. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator^( ColorComponentFlagBits bit0, ColorComponentFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  9000. {
  9001. return ColorComponentFlags( bit0 ) ^ bit1;
  9002. }
  9003. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ColorComponentFlags operator~( ColorComponentFlagBits bits ) VULKAN_HPP_NOEXCEPT
  9004. {
  9005. return ~( ColorComponentFlags( bits ) );
  9006. }
  9007. VULKAN_HPP_INLINE std::string to_string( ColorComponentFlags value )
  9008. {
  9009. if ( !value ) return "{}";
  9010. std::string result;
  9011. if ( value & ColorComponentFlagBits::eR ) result += "R | ";
  9012. if ( value & ColorComponentFlagBits::eG ) result += "G | ";
  9013. if ( value & ColorComponentFlagBits::eB ) result += "B | ";
  9014. if ( value & ColorComponentFlagBits::eA ) result += "A | ";
  9015. return "{ " + result.substr(0, result.size() - 3) + " }";
  9016. }
using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits>;
template <> struct FlagTraits<CommandBufferResetFlagBits>
{
  enum : VkFlags
  {
    allFlags = VkFlags(CommandBufferResetFlagBits::eReleaseResources)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator|( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return CommandBufferResetFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator&( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return CommandBufferResetFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator^( CommandBufferResetFlagBits bit0, CommandBufferResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return CommandBufferResetFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferResetFlags operator~( CommandBufferResetFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( CommandBufferResetFlags( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( CommandBufferResetFlags value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & CommandBufferResetFlagBits::eReleaseResources ) result += "ReleaseResources | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits>;
template <> struct FlagTraits<CommandBufferUsageFlagBits>
{
  enum : VkFlags
  {
    allFlags = VkFlags(CommandBufferUsageFlagBits::eOneTimeSubmit) |
               VkFlags(CommandBufferUsageFlagBits::eRenderPassContinue) |
               VkFlags(CommandBufferUsageFlagBits::eSimultaneousUse)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator|( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return CommandBufferUsageFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator&( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return CommandBufferUsageFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator^( CommandBufferUsageFlagBits bit0, CommandBufferUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return CommandBufferUsageFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandBufferUsageFlags operator~( CommandBufferUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( CommandBufferUsageFlags( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( CommandBufferUsageFlags value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & CommandBufferUsageFlagBits::eOneTimeSubmit ) result += "OneTimeSubmit | ";
  if ( value & CommandBufferUsageFlagBits::eRenderPassContinue ) result += "RenderPassContinue | ";
  if ( value & CommandBufferUsageFlagBits::eSimultaneousUse ) result += "SimultaneousUse | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
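// Illustrative sketch (not part of the generated header; CommandBufferBeginInfo is
// declared elsewhere in this header): usage bits are typically assigned directly or
// combined with operator| when beginning a command buffer.
//
//   vk::CommandBufferBeginInfo beginInfo;
//   beginInfo.flags = vk::CommandBufferUsageFlagBits::eOneTimeSubmit;
//   std::cout << vk::to_string( beginInfo.flags ) << '\n';   // "{ OneTimeSubmit }"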
using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits>;
template <> struct FlagTraits<CommandPoolCreateFlagBits>
{
  enum : VkFlags
  {
    allFlags = VkFlags(CommandPoolCreateFlagBits::eTransient) |
               VkFlags(CommandPoolCreateFlagBits::eResetCommandBuffer) |
               VkFlags(CommandPoolCreateFlagBits::eProtected)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator|( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return CommandPoolCreateFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator&( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return CommandPoolCreateFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator^( CommandPoolCreateFlagBits bit0, CommandPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return CommandPoolCreateFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolCreateFlags operator~( CommandPoolCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( CommandPoolCreateFlags( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( CommandPoolCreateFlags value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & CommandPoolCreateFlagBits::eTransient ) result += "Transient | ";
  if ( value & CommandPoolCreateFlagBits::eResetCommandBuffer ) result += "ResetCommandBuffer | ";
  if ( value & CommandPoolCreateFlagBits::eProtected ) result += "Protected | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits>;
template <> struct FlagTraits<CommandPoolResetFlagBits>
{
  enum : VkFlags
  {
    allFlags = VkFlags(CommandPoolResetFlagBits::eReleaseResources)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator|( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return CommandPoolResetFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator&( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return CommandPoolResetFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator^( CommandPoolResetFlagBits bit0, CommandPoolResetFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return CommandPoolResetFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CommandPoolResetFlags operator~( CommandPoolResetFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( CommandPoolResetFlags( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( CommandPoolResetFlags value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & CommandPoolResetFlagBits::eReleaseResources ) result += "ReleaseResources | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
enum class CommandPoolTrimFlagBits : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlagBits )
{
  return "(void)";
}
using CommandPoolTrimFlags = Flags<CommandPoolTrimFlagBits>;
using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags;
VULKAN_HPP_INLINE std::string to_string( CommandPoolTrimFlags )
{
  return "{}";
}
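// Flag types whose FlagBits enum has no values yet (such as CommandPoolTrimFlags above)
// only get the alias and the to_string() overloads: to_string() of the empty FlagBits
// type returns "(void)" and to_string() of the Flags type always returns "{}".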
using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR>;
template <> struct FlagTraits<CompositeAlphaFlagBitsKHR>
{
  enum : VkFlags
  {
    allFlags = VkFlags(CompositeAlphaFlagBitsKHR::eOpaque) |
               VkFlags(CompositeAlphaFlagBitsKHR::ePreMultiplied) |
               VkFlags(CompositeAlphaFlagBitsKHR::ePostMultiplied) |
               VkFlags(CompositeAlphaFlagBitsKHR::eInherit)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator|( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
  return CompositeAlphaFlagsKHR( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator&( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
  return CompositeAlphaFlagsKHR( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator^( CompositeAlphaFlagBitsKHR bit0, CompositeAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
  return CompositeAlphaFlagsKHR( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CompositeAlphaFlagsKHR operator~( CompositeAlphaFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( CompositeAlphaFlagsKHR( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( CompositeAlphaFlagsKHR value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & CompositeAlphaFlagBitsKHR::eOpaque ) result += "Opaque | ";
  if ( value & CompositeAlphaFlagBitsKHR::ePreMultiplied ) result += "PreMultiplied | ";
  if ( value & CompositeAlphaFlagBitsKHR::ePostMultiplied ) result += "PostMultiplied | ";
  if ( value & CompositeAlphaFlagBitsKHR::eInherit ) result += "Inherit | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
using ConditionalRenderingFlagsEXT = Flags<ConditionalRenderingFlagBitsEXT>;
template <> struct FlagTraits<ConditionalRenderingFlagBitsEXT>
{
  enum : VkFlags
  {
    allFlags = VkFlags(ConditionalRenderingFlagBitsEXT::eInverted)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator|( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ConditionalRenderingFlagsEXT( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator&( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ConditionalRenderingFlagsEXT( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator^( ConditionalRenderingFlagBitsEXT bit0, ConditionalRenderingFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ConditionalRenderingFlagsEXT( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ConditionalRenderingFlagsEXT operator~( ConditionalRenderingFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( ConditionalRenderingFlagsEXT( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( ConditionalRenderingFlagsEXT value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & ConditionalRenderingFlagBitsEXT::eInverted ) result += "Inverted | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
using CullModeFlags = Flags<CullModeFlagBits>;
template <> struct FlagTraits<CullModeFlagBits>
{
  enum : VkFlags
  {
    allFlags = VkFlags(CullModeFlagBits::eNone) |
               VkFlags(CullModeFlagBits::eFront) |
               VkFlags(CullModeFlagBits::eBack) |
               VkFlags(CullModeFlagBits::eFrontAndBack)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator|( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return CullModeFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator&( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return CullModeFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator^( CullModeFlagBits bit0, CullModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return CullModeFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR CullModeFlags operator~( CullModeFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( CullModeFlags( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( CullModeFlags value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & CullModeFlagBits::eFront ) result += "Front | ";
  if ( value & CullModeFlagBits::eBack ) result += "Back | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
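// Note that to_string( CullModeFlags ) only tests eFront and eBack: eNone (zero) prints
// as "{}", and eFrontAndBack, which equals eFront | eBack, prints as "{ Front | Back }"
// rather than under its own name.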
using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT>;
template <> struct FlagTraits<DebugReportFlagBitsEXT>
{
  enum : VkFlags
  {
    allFlags = VkFlags(DebugReportFlagBitsEXT::eInformation) |
               VkFlags(DebugReportFlagBitsEXT::eWarning) |
               VkFlags(DebugReportFlagBitsEXT::ePerformanceWarning) |
               VkFlags(DebugReportFlagBitsEXT::eError) |
               VkFlags(DebugReportFlagBitsEXT::eDebug)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator|( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DebugReportFlagsEXT( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator&( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DebugReportFlagsEXT( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator^( DebugReportFlagBitsEXT bit0, DebugReportFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DebugReportFlagsEXT( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugReportFlagsEXT operator~( DebugReportFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( DebugReportFlagsEXT( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( DebugReportFlagsEXT value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & DebugReportFlagBitsEXT::eInformation ) result += "Information | ";
  if ( value & DebugReportFlagBitsEXT::eWarning ) result += "Warning | ";
  if ( value & DebugReportFlagBitsEXT::ePerformanceWarning ) result += "PerformanceWarning | ";
  if ( value & DebugReportFlagBitsEXT::eError ) result += "Error | ";
  if ( value & DebugReportFlagBitsEXT::eDebug ) result += "Debug | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
using DebugUtilsMessageSeverityFlagsEXT = Flags<DebugUtilsMessageSeverityFlagBitsEXT>;
template <> struct FlagTraits<DebugUtilsMessageSeverityFlagBitsEXT>
{
  enum : VkFlags
  {
    allFlags = VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eVerbose) |
               VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eInfo) |
               VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eWarning) |
               VkFlags(DebugUtilsMessageSeverityFlagBitsEXT::eError)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator|( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DebugUtilsMessageSeverityFlagsEXT( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator&( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DebugUtilsMessageSeverityFlagsEXT( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator^( DebugUtilsMessageSeverityFlagBitsEXT bit0, DebugUtilsMessageSeverityFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DebugUtilsMessageSeverityFlagsEXT( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT operator~( DebugUtilsMessageSeverityFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( DebugUtilsMessageSeverityFlagsEXT( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageSeverityFlagsEXT value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eVerbose ) result += "Verbose | ";
  if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eInfo ) result += "Info | ";
  if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eWarning ) result += "Warning | ";
  if ( value & DebugUtilsMessageSeverityFlagBitsEXT::eError ) result += "Error | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
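// Illustrative sketch (not part of the generated header; DebugUtilsMessengerCreateInfoEXT
// is declared elsewhere in this header): severity and type masks are usually built with
// operator| before being passed to the messenger create info.
//
//   vk::DebugUtilsMessageSeverityFlagsEXT severity =
//       vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
//       vk::DebugUtilsMessageSeverityFlagBitsEXT::eError;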
using DebugUtilsMessageTypeFlagsEXT = Flags<DebugUtilsMessageTypeFlagBitsEXT>;
template <> struct FlagTraits<DebugUtilsMessageTypeFlagBitsEXT>
{
  enum : VkFlags
  {
    allFlags = VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eGeneral) |
               VkFlags(DebugUtilsMessageTypeFlagBitsEXT::eValidation) |
               VkFlags(DebugUtilsMessageTypeFlagBitsEXT::ePerformance)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator|( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DebugUtilsMessageTypeFlagsEXT( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator&( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DebugUtilsMessageTypeFlagsEXT( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator^( DebugUtilsMessageTypeFlagBitsEXT bit0, DebugUtilsMessageTypeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DebugUtilsMessageTypeFlagsEXT( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DebugUtilsMessageTypeFlagsEXT operator~( DebugUtilsMessageTypeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( DebugUtilsMessageTypeFlagsEXT( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessageTypeFlagsEXT value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & DebugUtilsMessageTypeFlagBitsEXT::eGeneral ) result += "General | ";
  if ( value & DebugUtilsMessageTypeFlagBitsEXT::eValidation ) result += "Validation | ";
  if ( value & DebugUtilsMessageTypeFlagBitsEXT::ePerformance ) result += "Performance | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
enum class DebugUtilsMessengerCallbackDataFlagBitsEXT : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagBitsEXT )
{
  return "(void)";
}
using DebugUtilsMessengerCallbackDataFlagsEXT = Flags<DebugUtilsMessengerCallbackDataFlagBitsEXT>;
VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCallbackDataFlagsEXT )
{
  return "{}";
}
enum class DebugUtilsMessengerCreateFlagBitsEXT : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagBitsEXT )
{
  return "(void)";
}
using DebugUtilsMessengerCreateFlagsEXT = Flags<DebugUtilsMessengerCreateFlagBitsEXT>;
VULKAN_HPP_INLINE std::string to_string( DebugUtilsMessengerCreateFlagsEXT )
{
  return "{}";
}
using DependencyFlags = Flags<DependencyFlagBits>;
template <> struct FlagTraits<DependencyFlagBits>
{
  enum : VkFlags
  {
    allFlags = VkFlags(DependencyFlagBits::eByRegion) |
               VkFlags(DependencyFlagBits::eDeviceGroup) |
               VkFlags(DependencyFlagBits::eViewLocal)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator|( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DependencyFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator&( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DependencyFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator^( DependencyFlagBits bit0, DependencyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DependencyFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DependencyFlags operator~( DependencyFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( DependencyFlags( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( DependencyFlags value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & DependencyFlagBits::eByRegion ) result += "ByRegion | ";
  if ( value & DependencyFlagBits::eDeviceGroup ) result += "DeviceGroup | ";
  if ( value & DependencyFlagBits::eViewLocal ) result += "ViewLocal | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
using DescriptorBindingFlags = Flags<DescriptorBindingFlagBits>;
template <> struct FlagTraits<DescriptorBindingFlagBits>
{
  enum : VkFlags
  {
    allFlags = VkFlags(DescriptorBindingFlagBits::eUpdateAfterBind) |
               VkFlags(DescriptorBindingFlagBits::eUpdateUnusedWhilePending) |
               VkFlags(DescriptorBindingFlagBits::ePartiallyBound) |
               VkFlags(DescriptorBindingFlagBits::eVariableDescriptorCount)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator|( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DescriptorBindingFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator&( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DescriptorBindingFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator^( DescriptorBindingFlagBits bit0, DescriptorBindingFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DescriptorBindingFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorBindingFlags operator~( DescriptorBindingFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( DescriptorBindingFlags( bits ) );
}
using DescriptorBindingFlagsEXT = DescriptorBindingFlags;
VULKAN_HPP_INLINE std::string to_string( DescriptorBindingFlags value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & DescriptorBindingFlagBits::eUpdateAfterBind ) result += "UpdateAfterBind | ";
  if ( value & DescriptorBindingFlagBits::eUpdateUnusedWhilePending ) result += "UpdateUnusedWhilePending | ";
  if ( value & DescriptorBindingFlagBits::ePartiallyBound ) result += "PartiallyBound | ";
  if ( value & DescriptorBindingFlagBits::eVariableDescriptorCount ) result += "VariableDescriptorCount | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
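// Where a flag type has been promoted from an extension into core Vulkan, the original
// extension-suffixed name is kept as a plain alias (DescriptorBindingFlagsEXT above, and
// the various ...KHR aliases below), so code written against the extension keeps compiling.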
using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits>;
template <> struct FlagTraits<DescriptorPoolCreateFlagBits>
{
  enum : VkFlags
  {
    allFlags = VkFlags(DescriptorPoolCreateFlagBits::eFreeDescriptorSet) |
               VkFlags(DescriptorPoolCreateFlagBits::eUpdateAfterBind) |
               VkFlags(DescriptorPoolCreateFlagBits::eHostOnlyVALVE)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator|( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DescriptorPoolCreateFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator&( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DescriptorPoolCreateFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator^( DescriptorPoolCreateFlagBits bit0, DescriptorPoolCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DescriptorPoolCreateFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorPoolCreateFlags operator~( DescriptorPoolCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( DescriptorPoolCreateFlags( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( DescriptorPoolCreateFlags value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & DescriptorPoolCreateFlagBits::eFreeDescriptorSet ) result += "FreeDescriptorSet | ";
  if ( value & DescriptorPoolCreateFlagBits::eUpdateAfterBind ) result += "UpdateAfterBind | ";
  if ( value & DescriptorPoolCreateFlagBits::eHostOnlyVALVE ) result += "HostOnlyVALVE | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
enum class DescriptorPoolResetFlagBits : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlagBits )
{
  return "(void)";
}
using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits>;
VULKAN_HPP_INLINE std::string to_string( DescriptorPoolResetFlags )
{
  return "{}";
}
using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits>;
template <> struct FlagTraits<DescriptorSetLayoutCreateFlagBits>
{
  enum : VkFlags
  {
    allFlags = VkFlags(DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool) |
               VkFlags(DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR) |
               VkFlags(DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolVALVE)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator|( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DescriptorSetLayoutCreateFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator&( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DescriptorSetLayoutCreateFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator^( DescriptorSetLayoutCreateFlagBits bit0, DescriptorSetLayoutCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DescriptorSetLayoutCreateFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateFlags operator~( DescriptorSetLayoutCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( DescriptorSetLayoutCreateFlags( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( DescriptorSetLayoutCreateFlags value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool ) result += "UpdateAfterBindPool | ";
  if ( value & DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR ) result += "PushDescriptorKHR | ";
  if ( value & DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolVALVE ) result += "HostOnlyPoolVALVE | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
enum class DescriptorUpdateTemplateCreateFlagBits : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlagBits )
{
  return "(void)";
}
using DescriptorUpdateTemplateCreateFlags = Flags<DescriptorUpdateTemplateCreateFlagBits>;
using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags;
VULKAN_HPP_INLINE std::string to_string( DescriptorUpdateTemplateCreateFlags )
{
  return "{}";
}
using DeviceCreateFlags = Flags<DeviceCreateFlagBits>;
VULKAN_HPP_INLINE std::string to_string( DeviceCreateFlags )
{
  return "{}";
}
using DeviceDiagnosticsConfigFlagsNV = Flags<DeviceDiagnosticsConfigFlagBitsNV>;
template <> struct FlagTraits<DeviceDiagnosticsConfigFlagBitsNV>
{
  enum : VkFlags
  {
    allFlags = VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo) |
               VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking) |
               VkFlags(DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator|( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DeviceDiagnosticsConfigFlagsNV( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator&( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DeviceDiagnosticsConfigFlagsNV( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator^( DeviceDiagnosticsConfigFlagBitsNV bit0, DeviceDiagnosticsConfigFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DeviceDiagnosticsConfigFlagsNV( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigFlagsNV operator~( DeviceDiagnosticsConfigFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( DeviceDiagnosticsConfigFlagsNV( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( DeviceDiagnosticsConfigFlagsNV value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo ) result += "EnableShaderDebugInfo | ";
  if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking ) result += "EnableResourceTracking | ";
  if ( value & DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints ) result += "EnableAutomaticCheckpoints | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
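// FlagTraits<...>::allFlags is the mask that the Flags<BitType> complement operator
// (defined earlier in this header) works against, so complementing a bit never sets bits
// that are undefined for the type. Illustrative sketch, under that assumption:
//
//   auto cfg = ~vk::DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo;
//   // cfg would then hold only eEnableResourceTracking and eEnableAutomaticCheckpoints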
using DeviceGroupPresentModeFlagsKHR = Flags<DeviceGroupPresentModeFlagBitsKHR>;
template <> struct FlagTraits<DeviceGroupPresentModeFlagBitsKHR>
{
  enum : VkFlags
  {
    allFlags = VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocal) |
               VkFlags(DeviceGroupPresentModeFlagBitsKHR::eRemote) |
               VkFlags(DeviceGroupPresentModeFlagBitsKHR::eSum) |
               VkFlags(DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator|( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DeviceGroupPresentModeFlagsKHR( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator&( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DeviceGroupPresentModeFlagsKHR( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator^( DeviceGroupPresentModeFlagBitsKHR bit0, DeviceGroupPresentModeFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DeviceGroupPresentModeFlagsKHR( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceGroupPresentModeFlagsKHR operator~( DeviceGroupPresentModeFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( DeviceGroupPresentModeFlagsKHR( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( DeviceGroupPresentModeFlagsKHR value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocal ) result += "Local | ";
  if ( value & DeviceGroupPresentModeFlagBitsKHR::eRemote ) result += "Remote | ";
  if ( value & DeviceGroupPresentModeFlagBitsKHR::eSum ) result += "Sum | ";
  if ( value & DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice ) result += "LocalMultiDevice | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
enum class DeviceMemoryReportFlagBitsEXT : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagBitsEXT )
{
  return "(void)";
}
using DeviceMemoryReportFlagsEXT = Flags<DeviceMemoryReportFlagBitsEXT>;
VULKAN_HPP_INLINE std::string to_string( DeviceMemoryReportFlagsEXT )
{
  return "{}";
}
using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits>;
template <> struct FlagTraits<DeviceQueueCreateFlagBits>
{
  enum : VkFlags
  {
    allFlags = VkFlags(DeviceQueueCreateFlagBits::eProtected)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator|( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DeviceQueueCreateFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator&( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DeviceQueueCreateFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator^( DeviceQueueCreateFlagBits bit0, DeviceQueueCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DeviceQueueCreateFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DeviceQueueCreateFlags operator~( DeviceQueueCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( DeviceQueueCreateFlags( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( DeviceQueueCreateFlags value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & DeviceQueueCreateFlagBits::eProtected ) result += "Protected | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
enum class DirectFBSurfaceCreateFlagBitsEXT : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagBitsEXT )
{
  return "(void)";
}
using DirectFBSurfaceCreateFlagsEXT = Flags<DirectFBSurfaceCreateFlagBitsEXT>;
VULKAN_HPP_INLINE std::string to_string( DirectFBSurfaceCreateFlagsEXT )
{
  return "{}";
}
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
enum class DisplayModeCreateFlagBitsKHR : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagBitsKHR )
{
  return "(void)";
}
using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR>;
VULKAN_HPP_INLINE std::string to_string( DisplayModeCreateFlagsKHR )
{
  return "{}";
}
using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR>;
template <> struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
{
  enum : VkFlags
  {
    allFlags = VkFlags(DisplayPlaneAlphaFlagBitsKHR::eOpaque) |
               VkFlags(DisplayPlaneAlphaFlagBitsKHR::eGlobal) |
               VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixel) |
               VkFlags(DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator|( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DisplayPlaneAlphaFlagsKHR( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator&( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DisplayPlaneAlphaFlagsKHR( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator^( DisplayPlaneAlphaFlagBitsKHR bit0, DisplayPlaneAlphaFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
{
  return DisplayPlaneAlphaFlagsKHR( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR DisplayPlaneAlphaFlagsKHR operator~( DisplayPlaneAlphaFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( DisplayPlaneAlphaFlagsKHR( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( DisplayPlaneAlphaFlagsKHR value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & DisplayPlaneAlphaFlagBitsKHR::eOpaque ) result += "Opaque | ";
  if ( value & DisplayPlaneAlphaFlagBitsKHR::eGlobal ) result += "Global | ";
  if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixel ) result += "PerPixel | ";
  if ( value & DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied ) result += "PerPixelPremultiplied | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
enum class DisplaySurfaceCreateFlagBitsKHR : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagBitsKHR )
{
  return "(void)";
}
using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR>;
VULKAN_HPP_INLINE std::string to_string( DisplaySurfaceCreateFlagsKHR )
{
  return "{}";
}
enum class EventCreateFlagBits : VkFlags
{};
VULKAN_HPP_INLINE std::string to_string( EventCreateFlagBits )
{
  return "(void)";
}
using EventCreateFlags = Flags<EventCreateFlagBits>;
VULKAN_HPP_INLINE std::string to_string( EventCreateFlags )
{
  return "{}";
}
using ExternalFenceFeatureFlags = Flags<ExternalFenceFeatureFlagBits>;
template <> struct FlagTraits<ExternalFenceFeatureFlagBits>
{
  enum : VkFlags
  {
    allFlags = VkFlags(ExternalFenceFeatureFlagBits::eExportable) |
               VkFlags(ExternalFenceFeatureFlagBits::eImportable)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator|( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalFenceFeatureFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator&( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalFenceFeatureFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator^( ExternalFenceFeatureFlagBits bit0, ExternalFenceFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalFenceFeatureFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceFeatureFlags operator~( ExternalFenceFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( ExternalFenceFeatureFlags( bits ) );
}
using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags;
VULKAN_HPP_INLINE std::string to_string( ExternalFenceFeatureFlags value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & ExternalFenceFeatureFlagBits::eExportable ) result += "Exportable | ";
  if ( value & ExternalFenceFeatureFlagBits::eImportable ) result += "Importable | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
using ExternalFenceHandleTypeFlags = Flags<ExternalFenceHandleTypeFlagBits>;
template <> struct FlagTraits<ExternalFenceHandleTypeFlagBits>
{
  enum : VkFlags
  {
    allFlags = VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueFd) |
               VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32) |
               VkFlags(ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt) |
               VkFlags(ExternalFenceHandleTypeFlagBits::eSyncFd)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator|( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalFenceHandleTypeFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator&( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalFenceHandleTypeFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator^( ExternalFenceHandleTypeFlagBits bit0, ExternalFenceHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalFenceHandleTypeFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalFenceHandleTypeFlags operator~( ExternalFenceHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( ExternalFenceHandleTypeFlags( bits ) );
}
using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags;
VULKAN_HPP_INLINE std::string to_string( ExternalFenceHandleTypeFlags value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | ";
  if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | ";
  if ( value & ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
  if ( value & ExternalFenceHandleTypeFlagBits::eSyncFd ) result += "SyncFd | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
using ExternalMemoryFeatureFlags = Flags<ExternalMemoryFeatureFlagBits>;
template <> struct FlagTraits<ExternalMemoryFeatureFlagBits>
{
  enum : VkFlags
  {
    allFlags = VkFlags(ExternalMemoryFeatureFlagBits::eDedicatedOnly) |
               VkFlags(ExternalMemoryFeatureFlagBits::eExportable) |
               VkFlags(ExternalMemoryFeatureFlagBits::eImportable)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator|( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalMemoryFeatureFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator&( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalMemoryFeatureFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator^( ExternalMemoryFeatureFlagBits bit0, ExternalMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalMemoryFeatureFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlags operator~( ExternalMemoryFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( ExternalMemoryFeatureFlags( bits ) );
}
using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags;
VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlags value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & ExternalMemoryFeatureFlagBits::eDedicatedOnly ) result += "DedicatedOnly | ";
  if ( value & ExternalMemoryFeatureFlagBits::eExportable ) result += "Exportable | ";
  if ( value & ExternalMemoryFeatureFlagBits::eImportable ) result += "Importable | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV>;
template <> struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
{
  enum : VkFlags
  {
    allFlags = VkFlags(ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly) |
               VkFlags(ExternalMemoryFeatureFlagBitsNV::eExportable) |
               VkFlags(ExternalMemoryFeatureFlagBitsNV::eImportable)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator|( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalMemoryFeatureFlagsNV( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator&( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalMemoryFeatureFlagsNV( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator^( ExternalMemoryFeatureFlagBitsNV bit0, ExternalMemoryFeatureFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalMemoryFeatureFlagsNV( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryFeatureFlagsNV operator~( ExternalMemoryFeatureFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( ExternalMemoryFeatureFlagsNV( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( ExternalMemoryFeatureFlagsNV value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly ) result += "DedicatedOnly | ";
  if ( value & ExternalMemoryFeatureFlagBitsNV::eExportable ) result += "Exportable | ";
  if ( value & ExternalMemoryFeatureFlagBitsNV::eImportable ) result += "Importable | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
using ExternalMemoryHandleTypeFlags = Flags<ExternalMemoryHandleTypeFlagBits>;
template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBits>
{
  enum : VkFlags
  {
    allFlags = VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueFd) |
               VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32) |
               VkFlags(ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt) |
               VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11Texture) |
               VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt) |
               VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Heap) |
               VkFlags(ExternalMemoryHandleTypeFlagBits::eD3D12Resource) |
               VkFlags(ExternalMemoryHandleTypeFlagBits::eDmaBufEXT) |
#ifdef VK_USE_PLATFORM_ANDROID_KHR
               VkFlags(ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID) |
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
               VkFlags(ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT) |
               VkFlags(ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator|( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalMemoryHandleTypeFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator&( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalMemoryHandleTypeFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator^( ExternalMemoryHandleTypeFlagBits bit0, ExternalMemoryHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalMemoryHandleTypeFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlags operator~( ExternalMemoryHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( ExternalMemoryHandleTypeFlags( bits ) );
}
using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags;
VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlags value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | ";
  if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | ";
  if ( value & ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
  if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11Texture ) result += "D3D11Texture | ";
  if ( value & ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt ) result += "D3D11TextureKmt | ";
  if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Heap ) result += "D3D12Heap | ";
  if ( value & ExternalMemoryHandleTypeFlagBits::eD3D12Resource ) result += "D3D12Resource | ";
  if ( value & ExternalMemoryHandleTypeFlagBits::eDmaBufEXT ) result += "DmaBufEXT | ";
#ifdef VK_USE_PLATFORM_ANDROID_KHR
  if ( value & ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID ) result += "AndroidHardwareBufferANDROID | ";
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  if ( value & ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT ) result += "HostAllocationEXT | ";
  if ( value & ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT ) result += "HostMappedForeignMemoryEXT | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
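// Bits that are only available on certain platforms (here eAndroidHardwareBufferANDROID)
// are wrapped in the corresponding VK_USE_PLATFORM_* guard both in allFlags and in
// to_string(), so the flag type stays consistent with what the platform headers expose.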
using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV>;
template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
{
  enum : VkFlags
  {
    allFlags = VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32) |
               VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt) |
               VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image) |
               VkFlags(ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator|( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalMemoryHandleTypeFlagsNV( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator&( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalMemoryHandleTypeFlagsNV( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator^( ExternalMemoryHandleTypeFlagBitsNV bit0, ExternalMemoryHandleTypeFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalMemoryHandleTypeFlagsNV( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalMemoryHandleTypeFlagsNV operator~( ExternalMemoryHandleTypeFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( ExternalMemoryHandleTypeFlagsNV( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( ExternalMemoryHandleTypeFlagsNV value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 ) result += "OpaqueWin32 | ";
  if ( value & ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
  if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image ) result += "D3D11Image | ";
  if ( value & ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt ) result += "D3D11ImageKmt | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
using ExternalSemaphoreFeatureFlags = Flags<ExternalSemaphoreFeatureFlagBits>;
template <> struct FlagTraits<ExternalSemaphoreFeatureFlagBits>
{
  enum : VkFlags
  {
    allFlags = VkFlags(ExternalSemaphoreFeatureFlagBits::eExportable) |
               VkFlags(ExternalSemaphoreFeatureFlagBits::eImportable)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator|( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalSemaphoreFeatureFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator&( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalSemaphoreFeatureFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator^( ExternalSemaphoreFeatureFlagBits bit0, ExternalSemaphoreFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalSemaphoreFeatureFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreFeatureFlags operator~( ExternalSemaphoreFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( ExternalSemaphoreFeatureFlags( bits ) );
}
using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags;
VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreFeatureFlags value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & ExternalSemaphoreFeatureFlagBits::eExportable ) result += "Exportable | ";
  if ( value & ExternalSemaphoreFeatureFlagBits::eImportable ) result += "Importable | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
using ExternalSemaphoreHandleTypeFlags = Flags<ExternalSemaphoreHandleTypeFlagBits>;
template <> struct FlagTraits<ExternalSemaphoreHandleTypeFlagBits>
{
  enum : VkFlags
  {
    allFlags = VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) |
               VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32) |
               VkFlags(ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt) |
               VkFlags(ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence) |
               VkFlags(ExternalSemaphoreHandleTypeFlagBits::eSyncFd)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator|( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalSemaphoreHandleTypeFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator&( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalSemaphoreHandleTypeFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator^( ExternalSemaphoreHandleTypeFlagBits bit0, ExternalSemaphoreHandleTypeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return ExternalSemaphoreHandleTypeFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ExternalSemaphoreHandleTypeFlags operator~( ExternalSemaphoreHandleTypeFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( ExternalSemaphoreHandleTypeFlags( bits ) );
}
using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags;
VULKAN_HPP_INLINE std::string to_string( ExternalSemaphoreHandleTypeFlags value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd ) result += "OpaqueFd | ";
  if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 ) result += "OpaqueWin32 | ";
  if ( value & ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt ) result += "OpaqueWin32Kmt | ";
  if ( value & ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence ) result += "D3D12Fence | ";
  if ( value & ExternalSemaphoreHandleTypeFlagBits::eSyncFd ) result += "SyncFd | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
using FenceCreateFlags = Flags<FenceCreateFlagBits>;
template <> struct FlagTraits<FenceCreateFlagBits>
{
  enum : VkFlags
  {
    allFlags = VkFlags(FenceCreateFlagBits::eSignaled)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator|( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return FenceCreateFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator&( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return FenceCreateFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator^( FenceCreateFlagBits bit0, FenceCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return FenceCreateFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceCreateFlags operator~( FenceCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( FenceCreateFlags( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( FenceCreateFlags value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & FenceCreateFlagBits::eSignaled ) result += "Signaled | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
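// Illustrative sketch (not part of the generated header; FenceCreateInfo and
// Device::createFence are declared elsewhere in this header, and a vk::Device handle
// named device is assumed): creating a fence that starts out signaled.
//
//   vk::FenceCreateInfo fenceInfo( vk::FenceCreateFlagBits::eSignaled );
//   vk::Fence fence = device.createFence( fenceInfo );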
using FenceImportFlags = Flags<FenceImportFlagBits>;
template <> struct FlagTraits<FenceImportFlagBits>
{
  enum : VkFlags
  {
    allFlags = VkFlags(FenceImportFlagBits::eTemporary)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator|( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return FenceImportFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator&( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return FenceImportFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator^( FenceImportFlagBits bit0, FenceImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return FenceImportFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FenceImportFlags operator~( FenceImportFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( FenceImportFlags( bits ) );
}
using FenceImportFlagsKHR = FenceImportFlags;
VULKAN_HPP_INLINE std::string to_string( FenceImportFlags value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & FenceImportFlagBits::eTemporary ) result += "Temporary | ";
  return "{ " + result.substr(0, result.size() - 3) + " }";
}
using FormatFeatureFlags = Flags<FormatFeatureFlagBits>;
template <> struct FlagTraits<FormatFeatureFlagBits>
{
  enum : VkFlags
  {
    allFlags = VkFlags(FormatFeatureFlagBits::eSampledImage) |
               VkFlags(FormatFeatureFlagBits::eStorageImage) |
               VkFlags(FormatFeatureFlagBits::eStorageImageAtomic) |
               VkFlags(FormatFeatureFlagBits::eUniformTexelBuffer) |
               VkFlags(FormatFeatureFlagBits::eStorageTexelBuffer) |
               VkFlags(FormatFeatureFlagBits::eStorageTexelBufferAtomic) |
               VkFlags(FormatFeatureFlagBits::eVertexBuffer) |
               VkFlags(FormatFeatureFlagBits::eColorAttachment) |
               VkFlags(FormatFeatureFlagBits::eColorAttachmentBlend) |
               VkFlags(FormatFeatureFlagBits::eDepthStencilAttachment) |
               VkFlags(FormatFeatureFlagBits::eBlitSrc) |
               VkFlags(FormatFeatureFlagBits::eBlitDst) |
               VkFlags(FormatFeatureFlagBits::eSampledImageFilterLinear) |
               VkFlags(FormatFeatureFlagBits::eTransferSrc) |
               VkFlags(FormatFeatureFlagBits::eTransferDst) |
               VkFlags(FormatFeatureFlagBits::eMidpointChromaSamples) |
               VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter) |
               VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter) |
               VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit) |
               VkFlags(FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable) |
               VkFlags(FormatFeatureFlagBits::eDisjoint) |
               VkFlags(FormatFeatureFlagBits::eCositedChromaSamples) |
               VkFlags(FormatFeatureFlagBits::eSampledImageFilterMinmax) |
               VkFlags(FormatFeatureFlagBits::eSampledImageFilterCubicIMG) |
               VkFlags(FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR) |
               VkFlags(FormatFeatureFlagBits::eFragmentDensityMapEXT) |
               VkFlags(FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR)
  };
};
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator|( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return FormatFeatureFlags( bit0 ) | bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator&( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return FormatFeatureFlags( bit0 ) & bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator^( FormatFeatureFlagBits bit0, FormatFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
{
  return FormatFeatureFlags( bit0 ) ^ bit1;
}
VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FormatFeatureFlags operator~( FormatFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
{
  return ~( FormatFeatureFlags( bits ) );
}
VULKAN_HPP_INLINE std::string to_string( FormatFeatureFlags value )
{
  if ( !value ) return "{}";
  std::string result;
  if ( value & FormatFeatureFlagBits::eSampledImage ) result += "SampledImage | ";
  if ( value & FormatFeatureFlagBits::eStorageImage ) result += "StorageImage | ";
  if ( value & FormatFeatureFlagBits::eStorageImageAtomic ) result += "StorageImageAtomic | ";
  if ( value & FormatFeatureFlagBits::eUniformTexelBuffer ) result += "UniformTexelBuffer | ";
  if ( value & FormatFeatureFlagBits::eStorageTexelBuffer ) result += "StorageTexelBuffer | ";
  if ( value & FormatFeatureFlagBits::eStorageTexelBufferAtomic ) result += "StorageTexelBufferAtomic | ";
  if ( value & FormatFeatureFlagBits::eVertexBuffer ) result += "VertexBuffer | ";
  if ( value & FormatFeatureFlagBits::eColorAttachment ) result += "ColorAttachment | ";
  if ( value & FormatFeatureFlagBits::eColorAttachmentBlend ) result += "ColorAttachmentBlend | ";
  if ( value & FormatFeatureFlagBits::eDepthStencilAttachment ) result += "DepthStencilAttachment | ";
  10202. if ( value & FormatFeatureFlagBits::eBlitSrc ) result += "BlitSrc | ";
  10203. if ( value & FormatFeatureFlagBits::eBlitDst ) result += "BlitDst | ";
  10204. if ( value & FormatFeatureFlagBits::eSampledImageFilterLinear ) result += "SampledImageFilterLinear | ";
  10205. if ( value & FormatFeatureFlagBits::eTransferSrc ) result += "TransferSrc | ";
  10206. if ( value & FormatFeatureFlagBits::eTransferDst ) result += "TransferDst | ";
  10207. if ( value & FormatFeatureFlagBits::eMidpointChromaSamples ) result += "MidpointChromaSamples | ";
  10208. if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter ) result += "SampledImageYcbcrConversionLinearFilter | ";
  10209. if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter ) result += "SampledImageYcbcrConversionSeparateReconstructionFilter | ";
  10210. if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit ) result += "SampledImageYcbcrConversionChromaReconstructionExplicit | ";
  10211. if ( value & FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable ) result += "SampledImageYcbcrConversionChromaReconstructionExplicitForceable | ";
  10212. if ( value & FormatFeatureFlagBits::eDisjoint ) result += "Disjoint | ";
  10213. if ( value & FormatFeatureFlagBits::eCositedChromaSamples ) result += "CositedChromaSamples | ";
  10214. if ( value & FormatFeatureFlagBits::eSampledImageFilterMinmax ) result += "SampledImageFilterMinmax | ";
  10215. if ( value & FormatFeatureFlagBits::eSampledImageFilterCubicIMG ) result += "SampledImageFilterCubicIMG | ";
  10216. if ( value & FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR ) result += "AccelerationStructureVertexBufferKHR | ";
  10217. if ( value & FormatFeatureFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | ";
  10218. if ( value & FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR ) result += "FragmentShadingRateAttachmentKHR | ";
  10219. return "{ " + result.substr(0, result.size() - 3) + " }";
  10220. }
  10221. using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits>;
  10222. template <> struct FlagTraits<FramebufferCreateFlagBits>
  10223. {
  10224. enum : VkFlags
  10225. {
  10226. allFlags = VkFlags(FramebufferCreateFlagBits::eImageless)
  10227. };
  10228. };
  10229. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator|( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10230. {
  10231. return FramebufferCreateFlags( bit0 ) | bit1;
  10232. }
  10233. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator&( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10234. {
  10235. return FramebufferCreateFlags( bit0 ) & bit1;
  10236. }
  10237. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator^( FramebufferCreateFlagBits bit0, FramebufferCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10238. {
  10239. return FramebufferCreateFlags( bit0 ) ^ bit1;
  10240. }
  10241. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR FramebufferCreateFlags operator~( FramebufferCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
  10242. {
  10243. return ~( FramebufferCreateFlags( bits ) );
  10244. }
  10245. VULKAN_HPP_INLINE std::string to_string( FramebufferCreateFlags value )
  10246. {
  10247. if ( !value ) return "{}";
  10248. std::string result;
  10249. if ( value & FramebufferCreateFlagBits::eImageless ) result += "Imageless | ";
  10250. return "{ " + result.substr(0, result.size() - 3) + " }";
  10251. }
  10252. using GeometryFlagsKHR = Flags<GeometryFlagBitsKHR>;
  10253. template <> struct FlagTraits<GeometryFlagBitsKHR>
  10254. {
  10255. enum : VkFlags
  10256. {
  10257. allFlags = VkFlags(GeometryFlagBitsKHR::eOpaque) |
  10258. VkFlags(GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation)
  10259. };
  10260. };
  10261. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator|( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  10262. {
  10263. return GeometryFlagsKHR( bit0 ) | bit1;
  10264. }
  10265. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator&( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  10266. {
  10267. return GeometryFlagsKHR( bit0 ) & bit1;
  10268. }
  10269. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator^( GeometryFlagBitsKHR bit0, GeometryFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  10270. {
  10271. return GeometryFlagsKHR( bit0 ) ^ bit1;
  10272. }
  10273. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryFlagsKHR operator~( GeometryFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
  10274. {
  10275. return ~( GeometryFlagsKHR( bits ) );
  10276. }
  10277. using GeometryFlagsNV = GeometryFlagsKHR;
  10278. VULKAN_HPP_INLINE std::string to_string( GeometryFlagsKHR value )
  10279. {
  10280. if ( !value ) return "{}";
  10281. std::string result;
  10282. if ( value & GeometryFlagBitsKHR::eOpaque ) result += "Opaque | ";
  10283. if ( value & GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation ) result += "NoDuplicateAnyHitInvocation | ";
  10284. return "{ " + result.substr(0, result.size() - 3) + " }";
  10285. }
  10286. using GeometryInstanceFlagsKHR = Flags<GeometryInstanceFlagBitsKHR>;
  10287. template <> struct FlagTraits<GeometryInstanceFlagBitsKHR>
  10288. {
  10289. enum : VkFlags
  10290. {
  10291. allFlags = VkFlags(GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable) |
  10292. VkFlags(GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise) |
  10293. VkFlags(GeometryInstanceFlagBitsKHR::eForceOpaque) |
  10294. VkFlags(GeometryInstanceFlagBitsKHR::eForceNoOpaque)
  10295. };
  10296. };
  10297. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator|( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  10298. {
  10299. return GeometryInstanceFlagsKHR( bit0 ) | bit1;
  10300. }
  10301. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator&( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  10302. {
  10303. return GeometryInstanceFlagsKHR( bit0 ) & bit1;
  10304. }
  10305. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator^( GeometryInstanceFlagBitsKHR bit0, GeometryInstanceFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  10306. {
  10307. return GeometryInstanceFlagsKHR( bit0 ) ^ bit1;
  10308. }
  10309. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR GeometryInstanceFlagsKHR operator~( GeometryInstanceFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
  10310. {
  10311. return ~( GeometryInstanceFlagsKHR( bits ) );
  10312. }
  10313. using GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR;
  10314. VULKAN_HPP_INLINE std::string to_string( GeometryInstanceFlagsKHR value )
  10315. {
  10316. if ( !value ) return "{}";
  10317. std::string result;
  10318. if ( value & GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable ) result += "TriangleFacingCullDisable | ";
  10319. if ( value & GeometryInstanceFlagBitsKHR::eTriangleFrontCounterclockwise ) result += "TriangleFrontCounterclockwise | ";
  10320. if ( value & GeometryInstanceFlagBitsKHR::eForceOpaque ) result += "ForceOpaque | ";
  10321. if ( value & GeometryInstanceFlagBitsKHR::eForceNoOpaque ) result += "ForceNoOpaque | ";
  10322. return "{ " + result.substr(0, result.size() - 3) + " }";
  10323. }
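// Flag bit enums with no values defined (such as the surface-create flags below) get only an
// empty FlagBits enum and trivial to_string overloads; no FlagTraits specialization or
// bitwise operators are generated for them.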
  10324. enum class HeadlessSurfaceCreateFlagBitsEXT : VkFlags
  10325. {};
  10326. VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagBitsEXT )
  10327. {
  10328. return "(void)";
  10329. }
  10330. using HeadlessSurfaceCreateFlagsEXT = Flags<HeadlessSurfaceCreateFlagBitsEXT>;
  10331. VULKAN_HPP_INLINE std::string to_string( HeadlessSurfaceCreateFlagsEXT )
  10332. {
  10333. return "{}";
  10334. }
  10335. #ifdef VK_USE_PLATFORM_IOS_MVK
  10336. enum class IOSSurfaceCreateFlagBitsMVK : VkFlags
  10337. {};
  10338. VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagBitsMVK )
  10339. {
  10340. return "(void)";
  10341. }
  10342. using IOSSurfaceCreateFlagsMVK = Flags<IOSSurfaceCreateFlagBitsMVK>;
  10343. VULKAN_HPP_INLINE std::string to_string( IOSSurfaceCreateFlagsMVK )
  10344. {
  10345. return "{}";
  10346. }
  10347. #endif /*VK_USE_PLATFORM_IOS_MVK*/
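// Platform-specific surface flag types (iOS / macOS MoltenVK, Metal, Fuchsia image pipe) are
// only compiled when the corresponding VK_USE_PLATFORM_* macro is defined.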
  10348. using ImageAspectFlags = Flags<ImageAspectFlagBits>;
  10349. template <> struct FlagTraits<ImageAspectFlagBits>
  10350. {
  10351. enum : VkFlags
  10352. {
  10353. allFlags = VkFlags(ImageAspectFlagBits::eColor) |
  10354. VkFlags(ImageAspectFlagBits::eDepth) |
  10355. VkFlags(ImageAspectFlagBits::eStencil) |
  10356. VkFlags(ImageAspectFlagBits::eMetadata) |
  10357. VkFlags(ImageAspectFlagBits::ePlane0) |
  10358. VkFlags(ImageAspectFlagBits::ePlane1) |
  10359. VkFlags(ImageAspectFlagBits::ePlane2) |
  10360. VkFlags(ImageAspectFlagBits::eMemoryPlane0EXT) |
  10361. VkFlags(ImageAspectFlagBits::eMemoryPlane1EXT) |
  10362. VkFlags(ImageAspectFlagBits::eMemoryPlane2EXT) |
  10363. VkFlags(ImageAspectFlagBits::eMemoryPlane3EXT)
  10364. };
  10365. };
  10366. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator|( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10367. {
  10368. return ImageAspectFlags( bit0 ) | bit1;
  10369. }
  10370. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator&( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10371. {
  10372. return ImageAspectFlags( bit0 ) & bit1;
  10373. }
  10374. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator^( ImageAspectFlagBits bit0, ImageAspectFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10375. {
  10376. return ImageAspectFlags( bit0 ) ^ bit1;
  10377. }
  10378. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageAspectFlags operator~( ImageAspectFlagBits bits ) VULKAN_HPP_NOEXCEPT
  10379. {
  10380. return ~( ImageAspectFlags( bits ) );
  10381. }
  10382. VULKAN_HPP_INLINE std::string to_string( ImageAspectFlags value )
  10383. {
  10384. if ( !value ) return "{}";
  10385. std::string result;
  10386. if ( value & ImageAspectFlagBits::eColor ) result += "Color | ";
  10387. if ( value & ImageAspectFlagBits::eDepth ) result += "Depth | ";
  10388. if ( value & ImageAspectFlagBits::eStencil ) result += "Stencil | ";
  10389. if ( value & ImageAspectFlagBits::eMetadata ) result += "Metadata | ";
  10390. if ( value & ImageAspectFlagBits::ePlane0 ) result += "Plane0 | ";
  10391. if ( value & ImageAspectFlagBits::ePlane1 ) result += "Plane1 | ";
  10392. if ( value & ImageAspectFlagBits::ePlane2 ) result += "Plane2 | ";
  10393. if ( value & ImageAspectFlagBits::eMemoryPlane0EXT ) result += "MemoryPlane0EXT | ";
  10394. if ( value & ImageAspectFlagBits::eMemoryPlane1EXT ) result += "MemoryPlane1EXT | ";
  10395. if ( value & ImageAspectFlagBits::eMemoryPlane2EXT ) result += "MemoryPlane2EXT | ";
  10396. if ( value & ImageAspectFlagBits::eMemoryPlane3EXT ) result += "MemoryPlane3EXT | ";
  10397. return "{ " + result.substr(0, result.size() - 3) + " }";
  10398. }
  10399. using ImageCreateFlags = Flags<ImageCreateFlagBits>;
  10400. template <> struct FlagTraits<ImageCreateFlagBits>
  10401. {
  10402. enum : VkFlags
  10403. {
  10404. allFlags = VkFlags(ImageCreateFlagBits::eSparseBinding) |
  10405. VkFlags(ImageCreateFlagBits::eSparseResidency) |
  10406. VkFlags(ImageCreateFlagBits::eSparseAliased) |
  10407. VkFlags(ImageCreateFlagBits::eMutableFormat) |
  10408. VkFlags(ImageCreateFlagBits::eCubeCompatible) |
  10409. VkFlags(ImageCreateFlagBits::eAlias) |
  10410. VkFlags(ImageCreateFlagBits::eSplitInstanceBindRegions) |
  10411. VkFlags(ImageCreateFlagBits::e2DArrayCompatible) |
  10412. VkFlags(ImageCreateFlagBits::eBlockTexelViewCompatible) |
  10413. VkFlags(ImageCreateFlagBits::eExtendedUsage) |
  10414. VkFlags(ImageCreateFlagBits::eProtected) |
  10415. VkFlags(ImageCreateFlagBits::eDisjoint) |
  10416. VkFlags(ImageCreateFlagBits::eCornerSampledNV) |
  10417. VkFlags(ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT) |
  10418. VkFlags(ImageCreateFlagBits::eSubsampledEXT)
  10419. };
  10420. };
  10421. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator|( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10422. {
  10423. return ImageCreateFlags( bit0 ) | bit1;
  10424. }
  10425. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator&( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10426. {
  10427. return ImageCreateFlags( bit0 ) & bit1;
  10428. }
  10429. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator^( ImageCreateFlagBits bit0, ImageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10430. {
  10431. return ImageCreateFlags( bit0 ) ^ bit1;
  10432. }
  10433. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageCreateFlags operator~( ImageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
  10434. {
  10435. return ~( ImageCreateFlags( bits ) );
  10436. }
  10437. VULKAN_HPP_INLINE std::string to_string( ImageCreateFlags value )
  10438. {
  10439. if ( !value ) return "{}";
  10440. std::string result;
  10441. if ( value & ImageCreateFlagBits::eSparseBinding ) result += "SparseBinding | ";
  10442. if ( value & ImageCreateFlagBits::eSparseResidency ) result += "SparseResidency | ";
  10443. if ( value & ImageCreateFlagBits::eSparseAliased ) result += "SparseAliased | ";
  10444. if ( value & ImageCreateFlagBits::eMutableFormat ) result += "MutableFormat | ";
  10445. if ( value & ImageCreateFlagBits::eCubeCompatible ) result += "CubeCompatible | ";
  10446. if ( value & ImageCreateFlagBits::eAlias ) result += "Alias | ";
  10447. if ( value & ImageCreateFlagBits::eSplitInstanceBindRegions ) result += "SplitInstanceBindRegions | ";
  10448. if ( value & ImageCreateFlagBits::e2DArrayCompatible ) result += "2DArrayCompatible | ";
  10449. if ( value & ImageCreateFlagBits::eBlockTexelViewCompatible ) result += "BlockTexelViewCompatible | ";
  10450. if ( value & ImageCreateFlagBits::eExtendedUsage ) result += "ExtendedUsage | ";
  10451. if ( value & ImageCreateFlagBits::eProtected ) result += "Protected | ";
  10452. if ( value & ImageCreateFlagBits::eDisjoint ) result += "Disjoint | ";
  10453. if ( value & ImageCreateFlagBits::eCornerSampledNV ) result += "CornerSampledNV | ";
  10454. if ( value & ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT ) result += "SampleLocationsCompatibleDepthEXT | ";
  10455. if ( value & ImageCreateFlagBits::eSubsampledEXT ) result += "SubsampledEXT | ";
  10456. return "{ " + result.substr(0, result.size() - 3) + " }";
  10457. }
  10458. #ifdef VK_USE_PLATFORM_FUCHSIA
  10459. enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA : VkFlags
  10460. {};
  10461. VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagBitsFUCHSIA )
  10462. {
  10463. return "(void)";
  10464. }
  10465. using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags<ImagePipeSurfaceCreateFlagBitsFUCHSIA>;
  10466. VULKAN_HPP_INLINE std::string to_string( ImagePipeSurfaceCreateFlagsFUCHSIA )
  10467. {
  10468. return "{}";
  10469. }
  10470. #endif /*VK_USE_PLATFORM_FUCHSIA*/
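// Combining bits and printing them works the same way for every flag type. Illustrative only:
//   vk::ImageUsageFlags usage = vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eSampled;
//   // vk::to_string( usage ) yields "{ Sampled | ColorAttachment }" (bits are listed in declaration order)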
  10471. using ImageUsageFlags = Flags<ImageUsageFlagBits>;
  10472. template <> struct FlagTraits<ImageUsageFlagBits>
  10473. {
  10474. enum : VkFlags
  10475. {
  10476. allFlags = VkFlags(ImageUsageFlagBits::eTransferSrc) |
  10477. VkFlags(ImageUsageFlagBits::eTransferDst) |
  10478. VkFlags(ImageUsageFlagBits::eSampled) |
  10479. VkFlags(ImageUsageFlagBits::eStorage) |
  10480. VkFlags(ImageUsageFlagBits::eColorAttachment) |
  10481. VkFlags(ImageUsageFlagBits::eDepthStencilAttachment) |
  10482. VkFlags(ImageUsageFlagBits::eTransientAttachment) |
  10483. VkFlags(ImageUsageFlagBits::eInputAttachment) |
  10484. VkFlags(ImageUsageFlagBits::eShadingRateImageNV) |
  10485. VkFlags(ImageUsageFlagBits::eFragmentDensityMapEXT)
  10486. };
  10487. };
  10488. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator|( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10489. {
  10490. return ImageUsageFlags( bit0 ) | bit1;
  10491. }
  10492. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator&( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10493. {
  10494. return ImageUsageFlags( bit0 ) & bit1;
  10495. }
  10496. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator^( ImageUsageFlagBits bit0, ImageUsageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10497. {
  10498. return ImageUsageFlags( bit0 ) ^ bit1;
  10499. }
  10500. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageUsageFlags operator~( ImageUsageFlagBits bits ) VULKAN_HPP_NOEXCEPT
  10501. {
  10502. return ~( ImageUsageFlags( bits ) );
  10503. }
  10504. VULKAN_HPP_INLINE std::string to_string( ImageUsageFlags value )
  10505. {
  10506. if ( !value ) return "{}";
  10507. std::string result;
  10508. if ( value & ImageUsageFlagBits::eTransferSrc ) result += "TransferSrc | ";
  10509. if ( value & ImageUsageFlagBits::eTransferDst ) result += "TransferDst | ";
  10510. if ( value & ImageUsageFlagBits::eSampled ) result += "Sampled | ";
  10511. if ( value & ImageUsageFlagBits::eStorage ) result += "Storage | ";
  10512. if ( value & ImageUsageFlagBits::eColorAttachment ) result += "ColorAttachment | ";
  10513. if ( value & ImageUsageFlagBits::eDepthStencilAttachment ) result += "DepthStencilAttachment | ";
  10514. if ( value & ImageUsageFlagBits::eTransientAttachment ) result += "TransientAttachment | ";
  10515. if ( value & ImageUsageFlagBits::eInputAttachment ) result += "InputAttachment | ";
  10516. if ( value & ImageUsageFlagBits::eShadingRateImageNV ) result += "ShadingRateImageNV | ";
  10517. if ( value & ImageUsageFlagBits::eFragmentDensityMapEXT ) result += "FragmentDensityMapEXT | ";
  10518. return "{ " + result.substr(0, result.size() - 3) + " }";
  10519. }
  10520. using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits>;
  10521. template <> struct FlagTraits<ImageViewCreateFlagBits>
  10522. {
  10523. enum : VkFlags
  10524. {
  10525. allFlags = VkFlags(ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT) |
  10526. VkFlags(ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT)
  10527. };
  10528. };
  10529. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator|( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10530. {
  10531. return ImageViewCreateFlags( bit0 ) | bit1;
  10532. }
  10533. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator&( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10534. {
  10535. return ImageViewCreateFlags( bit0 ) & bit1;
  10536. }
  10537. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator^( ImageViewCreateFlagBits bit0, ImageViewCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10538. {
  10539. return ImageViewCreateFlags( bit0 ) ^ bit1;
  10540. }
  10541. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ImageViewCreateFlags operator~( ImageViewCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
  10542. {
  10543. return ~( ImageViewCreateFlags( bits ) );
  10544. }
  10545. VULKAN_HPP_INLINE std::string to_string( ImageViewCreateFlags value )
  10546. {
  10547. if ( !value ) return "{}";
  10548. std::string result;
  10549. if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT ) result += "FragmentDensityMapDynamicEXT | ";
  10550. if ( value & ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT ) result += "FragmentDensityMapDeferredEXT | ";
  10551. return "{ " + result.substr(0, result.size() - 3) + " }";
  10552. }
  10553. using IndirectCommandsLayoutUsageFlagsNV = Flags<IndirectCommandsLayoutUsageFlagBitsNV>;
  10554. template <> struct FlagTraits<IndirectCommandsLayoutUsageFlagBitsNV>
  10555. {
  10556. enum : VkFlags
  10557. {
  10558. allFlags = VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess) |
  10559. VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences) |
  10560. VkFlags(IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences)
  10561. };
  10562. };
  10563. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator|( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  10564. {
  10565. return IndirectCommandsLayoutUsageFlagsNV( bit0 ) | bit1;
  10566. }
  10567. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator&( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  10568. {
  10569. return IndirectCommandsLayoutUsageFlagsNV( bit0 ) & bit1;
  10570. }
  10571. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator^( IndirectCommandsLayoutUsageFlagBitsNV bit0, IndirectCommandsLayoutUsageFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  10572. {
  10573. return IndirectCommandsLayoutUsageFlagsNV( bit0 ) ^ bit1;
  10574. }
  10575. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV operator~( IndirectCommandsLayoutUsageFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
  10576. {
  10577. return ~( IndirectCommandsLayoutUsageFlagsNV( bits ) );
  10578. }
  10579. VULKAN_HPP_INLINE std::string to_string( IndirectCommandsLayoutUsageFlagsNV value )
  10580. {
  10581. if ( !value ) return "{}";
  10582. std::string result;
  10583. if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess ) result += "ExplicitPreprocess | ";
  10584. if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences ) result += "IndexedSequences | ";
  10585. if ( value & IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences ) result += "UnorderedSequences | ";
  10586. return "{ " + result.substr(0, result.size() - 3) + " }";
  10587. }
  10588. using IndirectStateFlagsNV = Flags<IndirectStateFlagBitsNV>;
  10589. template <> struct FlagTraits<IndirectStateFlagBitsNV>
  10590. {
  10591. enum : VkFlags
  10592. {
  10593. allFlags = VkFlags(IndirectStateFlagBitsNV::eFlagFrontface)
  10594. };
  10595. };
  10596. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator|( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  10597. {
  10598. return IndirectStateFlagsNV( bit0 ) | bit1;
  10599. }
  10600. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator&( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  10601. {
  10602. return IndirectStateFlagsNV( bit0 ) & bit1;
  10603. }
  10604. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator^( IndirectStateFlagBitsNV bit0, IndirectStateFlagBitsNV bit1 ) VULKAN_HPP_NOEXCEPT
  10605. {
  10606. return IndirectStateFlagsNV( bit0 ) ^ bit1;
  10607. }
  10608. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR IndirectStateFlagsNV operator~( IndirectStateFlagBitsNV bits ) VULKAN_HPP_NOEXCEPT
  10609. {
  10610. return ~( IndirectStateFlagsNV( bits ) );
  10611. }
  10612. VULKAN_HPP_INLINE std::string to_string( IndirectStateFlagsNV value )
  10613. {
  10614. if ( !value ) return "{}";
  10615. std::string result;
  10616. if ( value & IndirectStateFlagBitsNV::eFlagFrontface ) result += "FlagFrontface | ";
  10617. return "{ " + result.substr(0, result.size() - 3) + " }";
  10618. }
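// InstanceCreateFlagBits defines no flag bits at this header version, so only the Flags alias
// and a trivial to_string are emitted for InstanceCreateFlags.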
  10619. using InstanceCreateFlags = Flags<InstanceCreateFlagBits>;
  10620. VULKAN_HPP_INLINE std::string to_string( InstanceCreateFlags )
  10621. {
  10622. return "{}";
  10623. }
  10624. #ifdef VK_USE_PLATFORM_MACOS_MVK
  10625. enum class MacOSSurfaceCreateFlagBitsMVK : VkFlags
  10626. {};
  10627. VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagBitsMVK )
  10628. {
  10629. return "(void)";
  10630. }
  10631. using MacOSSurfaceCreateFlagsMVK = Flags<MacOSSurfaceCreateFlagBitsMVK>;
  10632. VULKAN_HPP_INLINE std::string to_string( MacOSSurfaceCreateFlagsMVK )
  10633. {
  10634. return "{}";
  10635. }
  10636. #endif /*VK_USE_PLATFORM_MACOS_MVK*/
  10637. using MemoryAllocateFlags = Flags<MemoryAllocateFlagBits>;
  10638. template <> struct FlagTraits<MemoryAllocateFlagBits>
  10639. {
  10640. enum : VkFlags
  10641. {
  10642. allFlags = VkFlags(MemoryAllocateFlagBits::eDeviceMask) |
  10643. VkFlags(MemoryAllocateFlagBits::eDeviceAddress) |
  10644. VkFlags(MemoryAllocateFlagBits::eDeviceAddressCaptureReplay)
  10645. };
  10646. };
  10647. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator|( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10648. {
  10649. return MemoryAllocateFlags( bit0 ) | bit1;
  10650. }
  10651. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator&( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10652. {
  10653. return MemoryAllocateFlags( bit0 ) & bit1;
  10654. }
  10655. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator^( MemoryAllocateFlagBits bit0, MemoryAllocateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10656. {
  10657. return MemoryAllocateFlags( bit0 ) ^ bit1;
  10658. }
  10659. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryAllocateFlags operator~( MemoryAllocateFlagBits bits ) VULKAN_HPP_NOEXCEPT
  10660. {
  10661. return ~( MemoryAllocateFlags( bits ) );
  10662. }
  10663. using MemoryAllocateFlagsKHR = MemoryAllocateFlags;
  10664. VULKAN_HPP_INLINE std::string to_string( MemoryAllocateFlags value )
  10665. {
  10666. if ( !value ) return "{}";
  10667. std::string result;
  10668. if ( value & MemoryAllocateFlagBits::eDeviceMask ) result += "DeviceMask | ";
  10669. if ( value & MemoryAllocateFlagBits::eDeviceAddress ) result += "DeviceAddress | ";
  10670. if ( value & MemoryAllocateFlagBits::eDeviceAddressCaptureReplay ) result += "DeviceAddressCaptureReplay | ";
  10671. return "{ " + result.substr(0, result.size() - 3) + " }";
  10672. }
  10673. using MemoryHeapFlags = Flags<MemoryHeapFlagBits>;
  10674. template <> struct FlagTraits<MemoryHeapFlagBits>
  10675. {
  10676. enum : VkFlags
  10677. {
  10678. allFlags = VkFlags(MemoryHeapFlagBits::eDeviceLocal) |
  10679. VkFlags(MemoryHeapFlagBits::eMultiInstance)
  10680. };
  10681. };
  10682. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator|( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10683. {
  10684. return MemoryHeapFlags( bit0 ) | bit1;
  10685. }
  10686. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator&( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10687. {
  10688. return MemoryHeapFlags( bit0 ) & bit1;
  10689. }
  10690. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator^( MemoryHeapFlagBits bit0, MemoryHeapFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10691. {
  10692. return MemoryHeapFlags( bit0 ) ^ bit1;
  10693. }
  10694. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryHeapFlags operator~( MemoryHeapFlagBits bits ) VULKAN_HPP_NOEXCEPT
  10695. {
  10696. return ~( MemoryHeapFlags( bits ) );
  10697. }
  10698. VULKAN_HPP_INLINE std::string to_string( MemoryHeapFlags value )
  10699. {
  10700. if ( !value ) return "{}";
  10701. std::string result;
  10702. if ( value & MemoryHeapFlagBits::eDeviceLocal ) result += "DeviceLocal | ";
  10703. if ( value & MemoryHeapFlagBits::eMultiInstance ) result += "MultiInstance | ";
  10704. return "{ " + result.substr(0, result.size() - 3) + " }";
  10705. }
  10706. enum class MemoryMapFlagBits : VkFlags
  10707. {};
  10708. VULKAN_HPP_INLINE std::string to_string( MemoryMapFlagBits )
  10709. {
  10710. return "(void)";
  10711. }
  10712. using MemoryMapFlags = Flags<MemoryMapFlagBits>;
  10713. VULKAN_HPP_INLINE std::string to_string( MemoryMapFlags )
  10714. {
  10715. return "{}";
  10716. }
  10717. using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits>;
  10718. template <> struct FlagTraits<MemoryPropertyFlagBits>
  10719. {
  10720. enum : VkFlags
  10721. {
  10722. allFlags = VkFlags(MemoryPropertyFlagBits::eDeviceLocal) |
  10723. VkFlags(MemoryPropertyFlagBits::eHostVisible) |
  10724. VkFlags(MemoryPropertyFlagBits::eHostCoherent) |
  10725. VkFlags(MemoryPropertyFlagBits::eHostCached) |
  10726. VkFlags(MemoryPropertyFlagBits::eLazilyAllocated) |
  10727. VkFlags(MemoryPropertyFlagBits::eProtected) |
  10728. VkFlags(MemoryPropertyFlagBits::eDeviceCoherentAMD) |
  10729. VkFlags(MemoryPropertyFlagBits::eDeviceUncachedAMD)
  10730. };
  10731. };
  10732. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator|( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10733. {
  10734. return MemoryPropertyFlags( bit0 ) | bit1;
  10735. }
  10736. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator&( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10737. {
  10738. return MemoryPropertyFlags( bit0 ) & bit1;
  10739. }
  10740. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator^( MemoryPropertyFlagBits bit0, MemoryPropertyFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10741. {
  10742. return MemoryPropertyFlags( bit0 ) ^ bit1;
  10743. }
  10744. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR MemoryPropertyFlags operator~( MemoryPropertyFlagBits bits ) VULKAN_HPP_NOEXCEPT
  10745. {
  10746. return ~( MemoryPropertyFlags( bits ) );
  10747. }
  10748. VULKAN_HPP_INLINE std::string to_string( MemoryPropertyFlags value )
  10749. {
  10750. if ( !value ) return "{}";
  10751. std::string result;
  10752. if ( value & MemoryPropertyFlagBits::eDeviceLocal ) result += "DeviceLocal | ";
  10753. if ( value & MemoryPropertyFlagBits::eHostVisible ) result += "HostVisible | ";
  10754. if ( value & MemoryPropertyFlagBits::eHostCoherent ) result += "HostCoherent | ";
  10755. if ( value & MemoryPropertyFlagBits::eHostCached ) result += "HostCached | ";
  10756. if ( value & MemoryPropertyFlagBits::eLazilyAllocated ) result += "LazilyAllocated | ";
  10757. if ( value & MemoryPropertyFlagBits::eProtected ) result += "Protected | ";
  10758. if ( value & MemoryPropertyFlagBits::eDeviceCoherentAMD ) result += "DeviceCoherentAMD | ";
  10759. if ( value & MemoryPropertyFlagBits::eDeviceUncachedAMD ) result += "DeviceUncachedAMD | ";
  10760. return "{ " + result.substr(0, result.size() - 3) + " }";
  10761. }
  10762. #ifdef VK_USE_PLATFORM_METAL_EXT
  10763. enum class MetalSurfaceCreateFlagBitsEXT : VkFlags
  10764. {};
  10765. VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagBitsEXT )
  10766. {
  10767. return "(void)";
  10768. }
  10769. using MetalSurfaceCreateFlagsEXT = Flags<MetalSurfaceCreateFlagBitsEXT>;
  10770. VULKAN_HPP_INLINE std::string to_string( MetalSurfaceCreateFlagsEXT )
  10771. {
  10772. return "{}";
  10773. }
  10774. #endif /*VK_USE_PLATFORM_METAL_EXT*/
  10775. using PeerMemoryFeatureFlags = Flags<PeerMemoryFeatureFlagBits>;
  10776. template <> struct FlagTraits<PeerMemoryFeatureFlagBits>
  10777. {
  10778. enum : VkFlags
  10779. {
  10780. allFlags = VkFlags(PeerMemoryFeatureFlagBits::eCopySrc) |
  10781. VkFlags(PeerMemoryFeatureFlagBits::eCopyDst) |
  10782. VkFlags(PeerMemoryFeatureFlagBits::eGenericSrc) |
  10783. VkFlags(PeerMemoryFeatureFlagBits::eGenericDst)
  10784. };
  10785. };
  10786. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator|( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10787. {
  10788. return PeerMemoryFeatureFlags( bit0 ) | bit1;
  10789. }
  10790. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator&( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10791. {
  10792. return PeerMemoryFeatureFlags( bit0 ) & bit1;
  10793. }
  10794. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator^( PeerMemoryFeatureFlagBits bit0, PeerMemoryFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10795. {
  10796. return PeerMemoryFeatureFlags( bit0 ) ^ bit1;
  10797. }
  10798. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PeerMemoryFeatureFlags operator~( PeerMemoryFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
  10799. {
  10800. return ~( PeerMemoryFeatureFlags( bits ) );
  10801. }
  10802. using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags;
  10803. VULKAN_HPP_INLINE std::string to_string( PeerMemoryFeatureFlags value )
  10804. {
  10805. if ( !value ) return "{}";
  10806. std::string result;
  10807. if ( value & PeerMemoryFeatureFlagBits::eCopySrc ) result += "CopySrc | ";
  10808. if ( value & PeerMemoryFeatureFlagBits::eCopyDst ) result += "CopyDst | ";
  10809. if ( value & PeerMemoryFeatureFlagBits::eGenericSrc ) result += "GenericSrc | ";
  10810. if ( value & PeerMemoryFeatureFlagBits::eGenericDst ) result += "GenericDst | ";
  10811. return "{ " + result.substr(0, result.size() - 3) + " }";
  10812. }
  10813. using PerformanceCounterDescriptionFlagsKHR = Flags<PerformanceCounterDescriptionFlagBitsKHR>;
  10814. template <> struct FlagTraits<PerformanceCounterDescriptionFlagBitsKHR>
  10815. {
  10816. enum : VkFlags
  10817. {
  10818. allFlags = VkFlags(PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting) |
  10819. VkFlags(PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted)
  10820. };
  10821. };
  10822. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator|( PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  10823. {
  10824. return PerformanceCounterDescriptionFlagsKHR( bit0 ) | bit1;
  10825. }
  10826. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator&( PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  10827. {
  10828. return PerformanceCounterDescriptionFlagsKHR( bit0 ) & bit1;
  10829. }
  10830. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator^( PerformanceCounterDescriptionFlagBitsKHR bit0, PerformanceCounterDescriptionFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  10831. {
  10832. return PerformanceCounterDescriptionFlagsKHR( bit0 ) ^ bit1;
  10833. }
  10834. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PerformanceCounterDescriptionFlagsKHR operator~( PerformanceCounterDescriptionFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
  10835. {
  10836. return ~( PerformanceCounterDescriptionFlagsKHR( bits ) );
  10837. }
  10838. VULKAN_HPP_INLINE std::string to_string( PerformanceCounterDescriptionFlagsKHR value )
  10839. {
  10840. if ( !value ) return "{}";
  10841. std::string result;
  10842. if ( value & PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting ) result += "PerformanceImpacting | ";
  10843. if ( value & PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted ) result += "ConcurrentlyImpacted | ";
  10844. return "{ " + result.substr(0, result.size() - 3) + " }";
  10845. }
  10846. using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits>;
  10847. template <> struct FlagTraits<PipelineCacheCreateFlagBits>
  10848. {
  10849. enum : VkFlags
  10850. {
  10851. allFlags = VkFlags(PipelineCacheCreateFlagBits::eExternallySynchronizedEXT)
  10852. };
  10853. };
  10854. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator|( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10855. {
  10856. return PipelineCacheCreateFlags( bit0 ) | bit1;
  10857. }
  10858. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator&( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10859. {
  10860. return PipelineCacheCreateFlags( bit0 ) & bit1;
  10861. }
  10862. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator^( PipelineCacheCreateFlagBits bit0, PipelineCacheCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10863. {
  10864. return PipelineCacheCreateFlags( bit0 ) ^ bit1;
  10865. }
  10866. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCacheCreateFlags operator~( PipelineCacheCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
  10867. {
  10868. return ~( PipelineCacheCreateFlags( bits ) );
  10869. }
  10870. VULKAN_HPP_INLINE std::string to_string( PipelineCacheCreateFlags value )
  10871. {
  10872. if ( !value ) return "{}";
  10873. std::string result;
  10874. if ( value & PipelineCacheCreateFlagBits::eExternallySynchronizedEXT ) result += "ExternallySynchronizedEXT | ";
  10875. return "{ " + result.substr(0, result.size() - 3) + " }";
  10876. }
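// The Pipeline*StateCreateFlagBits enums that follow are reserved for future use: they define
// no bits yet, so only empty enums and trivial to_string overloads are generated for them.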
  10877. enum class PipelineColorBlendStateCreateFlagBits : VkFlags
  10878. {};
  10879. VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlagBits )
  10880. {
  10881. return "(void)";
  10882. }
  10883. using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits>;
  10884. VULKAN_HPP_INLINE std::string to_string( PipelineColorBlendStateCreateFlags )
  10885. {
  10886. return "{}";
  10887. }
  10888. using PipelineCompilerControlFlagsAMD = Flags<PipelineCompilerControlFlagBitsAMD>;
  10889. VULKAN_HPP_INLINE std::string to_string( PipelineCompilerControlFlagsAMD )
  10890. {
  10891. return "{}";
  10892. }
  10893. enum class PipelineCoverageModulationStateCreateFlagBitsNV : VkFlags
  10894. {};
  10895. VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagBitsNV )
  10896. {
  10897. return "(void)";
  10898. }
  10899. using PipelineCoverageModulationStateCreateFlagsNV = Flags<PipelineCoverageModulationStateCreateFlagBitsNV>;
  10900. VULKAN_HPP_INLINE std::string to_string( PipelineCoverageModulationStateCreateFlagsNV )
  10901. {
  10902. return "{}";
  10903. }
  10904. enum class PipelineCoverageReductionStateCreateFlagBitsNV : VkFlags
  10905. {};
  10906. VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagBitsNV )
  10907. {
  10908. return "(void)";
  10909. }
  10910. using PipelineCoverageReductionStateCreateFlagsNV = Flags<PipelineCoverageReductionStateCreateFlagBitsNV>;
  10911. VULKAN_HPP_INLINE std::string to_string( PipelineCoverageReductionStateCreateFlagsNV )
  10912. {
  10913. return "{}";
  10914. }
  10915. enum class PipelineCoverageToColorStateCreateFlagBitsNV : VkFlags
  10916. {};
  10917. VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagBitsNV )
  10918. {
  10919. return "(void)";
  10920. }
  10921. using PipelineCoverageToColorStateCreateFlagsNV = Flags<PipelineCoverageToColorStateCreateFlagBitsNV>;
  10922. VULKAN_HPP_INLINE std::string to_string( PipelineCoverageToColorStateCreateFlagsNV )
  10923. {
  10924. return "{}";
  10925. }
  10926. using PipelineCreateFlags = Flags<PipelineCreateFlagBits>;
  10927. template <> struct FlagTraits<PipelineCreateFlagBits>
  10928. {
  10929. enum : VkFlags
  10930. {
  10931. allFlags = VkFlags(PipelineCreateFlagBits::eDisableOptimization) |
  10932. VkFlags(PipelineCreateFlagBits::eAllowDerivatives) |
  10933. VkFlags(PipelineCreateFlagBits::eDerivative) |
  10934. VkFlags(PipelineCreateFlagBits::eViewIndexFromDeviceIndex) |
  10935. VkFlags(PipelineCreateFlagBits::eDispatchBase) |
  10936. VkFlags(PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR) |
  10937. VkFlags(PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR) |
  10938. VkFlags(PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR) |
  10939. VkFlags(PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR) |
  10940. VkFlags(PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR) |
  10941. VkFlags(PipelineCreateFlagBits::eRayTracingSkipAabbsKHR) |
  10942. VkFlags(PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR) |
  10943. VkFlags(PipelineCreateFlagBits::eDeferCompileNV) |
  10944. VkFlags(PipelineCreateFlagBits::eCaptureStatisticsKHR) |
  10945. VkFlags(PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR) |
  10946. VkFlags(PipelineCreateFlagBits::eIndirectBindableNV) |
  10947. VkFlags(PipelineCreateFlagBits::eLibraryKHR) |
  10948. VkFlags(PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT) |
  10949. VkFlags(PipelineCreateFlagBits::eEarlyReturnOnFailureEXT)
  10950. };
  10951. };
  10952. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator|( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10953. {
  10954. return PipelineCreateFlags( bit0 ) | bit1;
  10955. }
  10956. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator&( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10957. {
  10958. return PipelineCreateFlags( bit0 ) & bit1;
  10959. }
  10960. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator^( PipelineCreateFlagBits bit0, PipelineCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  10961. {
  10962. return PipelineCreateFlags( bit0 ) ^ bit1;
  10963. }
  10964. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreateFlags operator~( PipelineCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
  10965. {
  10966. return ~( PipelineCreateFlags( bits ) );
  10967. }
  10968. VULKAN_HPP_INLINE std::string to_string( PipelineCreateFlags value )
  10969. {
  10970. if ( !value ) return "{}";
  10971. std::string result;
  10972. if ( value & PipelineCreateFlagBits::eDisableOptimization ) result += "DisableOptimization | ";
  10973. if ( value & PipelineCreateFlagBits::eAllowDerivatives ) result += "AllowDerivatives | ";
  10974. if ( value & PipelineCreateFlagBits::eDerivative ) result += "Derivative | ";
  10975. if ( value & PipelineCreateFlagBits::eViewIndexFromDeviceIndex ) result += "ViewIndexFromDeviceIndex | ";
  10976. if ( value & PipelineCreateFlagBits::eDispatchBase ) result += "DispatchBase | ";
  10977. if ( value & PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR ) result += "RayTracingNoNullAnyHitShadersKHR | ";
  10978. if ( value & PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR ) result += "RayTracingNoNullClosestHitShadersKHR | ";
  10979. if ( value & PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR ) result += "RayTracingNoNullMissShadersKHR | ";
  10980. if ( value & PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR ) result += "RayTracingNoNullIntersectionShadersKHR | ";
  10981. if ( value & PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR ) result += "RayTracingSkipTrianglesKHR | ";
  10982. if ( value & PipelineCreateFlagBits::eRayTracingSkipAabbsKHR ) result += "RayTracingSkipAabbsKHR | ";
  10983. if ( value & PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR ) result += "RayTracingShaderGroupHandleCaptureReplayKHR | ";
  10984. if ( value & PipelineCreateFlagBits::eDeferCompileNV ) result += "DeferCompileNV | ";
  10985. if ( value & PipelineCreateFlagBits::eCaptureStatisticsKHR ) result += "CaptureStatisticsKHR | ";
  10986. if ( value & PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR ) result += "CaptureInternalRepresentationsKHR | ";
  10987. if ( value & PipelineCreateFlagBits::eIndirectBindableNV ) result += "IndirectBindableNV | ";
  10988. if ( value & PipelineCreateFlagBits::eLibraryKHR ) result += "LibraryKHR | ";
  10989. if ( value & PipelineCreateFlagBits::eFailOnPipelineCompileRequiredEXT ) result += "FailOnPipelineCompileRequiredEXT | ";
  10990. if ( value & PipelineCreateFlagBits::eEarlyReturnOnFailureEXT ) result += "EarlyReturnOnFailureEXT | ";
  10991. return "{ " + result.substr(0, result.size() - 3) + " }";
  10992. }
  10993. using PipelineCreationFeedbackFlagsEXT = Flags<PipelineCreationFeedbackFlagBitsEXT>;
  10994. template <> struct FlagTraits<PipelineCreationFeedbackFlagBitsEXT>
  10995. {
  10996. enum : VkFlags
  10997. {
  10998. allFlags = VkFlags(PipelineCreationFeedbackFlagBitsEXT::eValid) |
  10999. VkFlags(PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit) |
  11000. VkFlags(PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration)
  11001. };
  11002. };
  11003. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator|( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  11004. {
  11005. return PipelineCreationFeedbackFlagsEXT( bit0 ) | bit1;
  11006. }
  11007. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator&( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  11008. {
  11009. return PipelineCreationFeedbackFlagsEXT( bit0 ) & bit1;
  11010. }
  11011. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator^( PipelineCreationFeedbackFlagBitsEXT bit0, PipelineCreationFeedbackFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  11012. {
  11013. return PipelineCreationFeedbackFlagsEXT( bit0 ) ^ bit1;
  11014. }
  11015. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackFlagsEXT operator~( PipelineCreationFeedbackFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
  11016. {
  11017. return ~( PipelineCreationFeedbackFlagsEXT( bits ) );
  11018. }
  11019. VULKAN_HPP_INLINE std::string to_string( PipelineCreationFeedbackFlagsEXT value )
  11020. {
  11021. if ( !value ) return "{}";
  11022. std::string result;
  11023. if ( value & PipelineCreationFeedbackFlagBitsEXT::eValid ) result += "Valid | ";
  11024. if ( value & PipelineCreationFeedbackFlagBitsEXT::eApplicationPipelineCacheHit ) result += "ApplicationPipelineCacheHit | ";
  11025. if ( value & PipelineCreationFeedbackFlagBitsEXT::eBasePipelineAcceleration ) result += "BasePipelineAcceleration | ";
  11026. return "{ " + result.substr(0, result.size() - 3) + " }";
  11027. }
  11028. enum class PipelineDepthStencilStateCreateFlagBits : VkFlags
  11029. {};
  11030. VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlagBits )
  11031. {
  11032. return "(void)";
  11033. }
  11034. using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits>;
  11035. VULKAN_HPP_INLINE std::string to_string( PipelineDepthStencilStateCreateFlags )
  11036. {
  11037. return "{}";
  11038. }
  11039. enum class PipelineDiscardRectangleStateCreateFlagBitsEXT : VkFlags
  11040. {};
  11041. VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagBitsEXT )
  11042. {
  11043. return "(void)";
  11044. }
  11045. using PipelineDiscardRectangleStateCreateFlagsEXT = Flags<PipelineDiscardRectangleStateCreateFlagBitsEXT>;
  11046. VULKAN_HPP_INLINE std::string to_string( PipelineDiscardRectangleStateCreateFlagsEXT )
  11047. {
  11048. return "{}";
  11049. }
  11050. enum class PipelineDynamicStateCreateFlagBits : VkFlags
  11051. {};
  11052. VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlagBits )
  11053. {
  11054. return "(void)";
  11055. }
  11056. using PipelineDynamicStateCreateFlags = Flags<PipelineDynamicStateCreateFlagBits>;
  11057. VULKAN_HPP_INLINE std::string to_string( PipelineDynamicStateCreateFlags )
  11058. {
  11059. return "{}";
  11060. }
  11061. enum class PipelineInputAssemblyStateCreateFlagBits : VkFlags
  11062. {};
  11063. VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlagBits )
  11064. {
  11065. return "(void)";
  11066. }
  11067. using PipelineInputAssemblyStateCreateFlags = Flags<PipelineInputAssemblyStateCreateFlagBits>;
  11068. VULKAN_HPP_INLINE std::string to_string( PipelineInputAssemblyStateCreateFlags )
  11069. {
  11070. return "{}";
  11071. }
  11072. enum class PipelineLayoutCreateFlagBits : VkFlags
  11073. {};
  11074. VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlagBits )
  11075. {
  11076. return "(void)";
  11077. }
  11078. using PipelineLayoutCreateFlags = Flags<PipelineLayoutCreateFlagBits>;
  11079. VULKAN_HPP_INLINE std::string to_string( PipelineLayoutCreateFlags )
  11080. {
  11081. return "{}";
  11082. }
  11083. enum class PipelineMultisampleStateCreateFlagBits : VkFlags
  11084. {};
  11085. VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlagBits )
  11086. {
  11087. return "(void)";
  11088. }
  11089. using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits>;
  11090. VULKAN_HPP_INLINE std::string to_string( PipelineMultisampleStateCreateFlags )
  11091. {
  11092. return "{}";
  11093. }
  11094. enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT : VkFlags
  11095. {};
  11096. VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagBitsEXT )
  11097. {
  11098. return "(void)";
  11099. }
  11100. using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags<PipelineRasterizationConservativeStateCreateFlagBitsEXT>;
  11101. VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationConservativeStateCreateFlagsEXT )
  11102. {
  11103. return "{}";
  11104. }
  11105. enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT : VkFlags
  11106. {};
  11107. VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagBitsEXT )
  11108. {
  11109. return "(void)";
  11110. }
  11111. using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags<PipelineRasterizationDepthClipStateCreateFlagBitsEXT>;
  11112. VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationDepthClipStateCreateFlagsEXT )
  11113. {
  11114. return "{}";
  11115. }
  11116. enum class PipelineRasterizationStateCreateFlagBits : VkFlags
  11117. {};
  11118. VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlagBits )
  11119. {
  11120. return "(void)";
  11121. }
  11122. using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits>;
  11123. VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateCreateFlags )
  11124. {
  11125. return "{}";
  11126. }
  11127. enum class PipelineRasterizationStateStreamCreateFlagBitsEXT : VkFlags
  11128. {};
  11129. VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagBitsEXT )
  11130. {
  11131. return "(void)";
  11132. }
  11133. using PipelineRasterizationStateStreamCreateFlagsEXT = Flags<PipelineRasterizationStateStreamCreateFlagBitsEXT>;
  11134. VULKAN_HPP_INLINE std::string to_string( PipelineRasterizationStateStreamCreateFlagsEXT )
  11135. {
  11136. return "{}";
  11137. }
  11138. using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits>;
  11139. template <> struct FlagTraits<PipelineShaderStageCreateFlagBits>
  11140. {
  11141. enum : VkFlags
  11142. {
  11143. allFlags = VkFlags(PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT) |
  11144. VkFlags(PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT)
  11145. };
  11146. };
  11147. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator|( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11148. {
  11149. return PipelineShaderStageCreateFlags( bit0 ) | bit1;
  11150. }
  11151. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator&( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11152. {
  11153. return PipelineShaderStageCreateFlags( bit0 ) & bit1;
  11154. }
  11155. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator^( PipelineShaderStageCreateFlagBits bit0, PipelineShaderStageCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11156. {
  11157. return PipelineShaderStageCreateFlags( bit0 ) ^ bit1;
  11158. }
  11159. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateFlags operator~( PipelineShaderStageCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
  11160. {
  11161. return ~( PipelineShaderStageCreateFlags( bits ) );
  11162. }
  11163. VULKAN_HPP_INLINE std::string to_string( PipelineShaderStageCreateFlags value )
  11164. {
  11165. if ( !value ) return "{}";
  11166. std::string result;
  11167. if ( value & PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSizeEXT ) result += "AllowVaryingSubgroupSizeEXT | ";
  11168. if ( value & PipelineShaderStageCreateFlagBits::eRequireFullSubgroupsEXT ) result += "RequireFullSubgroupsEXT | ";
  11169. return "{ " + result.substr(0, result.size() - 3) + " }";
  11170. }
  11171. using PipelineStageFlags = Flags<PipelineStageFlagBits>;
  11172. template <> struct FlagTraits<PipelineStageFlagBits>
  11173. {
  11174. enum : VkFlags
  11175. {
  11176. allFlags = VkFlags(PipelineStageFlagBits::eTopOfPipe) |
  11177. VkFlags(PipelineStageFlagBits::eDrawIndirect) |
  11178. VkFlags(PipelineStageFlagBits::eVertexInput) |
  11179. VkFlags(PipelineStageFlagBits::eVertexShader) |
  11180. VkFlags(PipelineStageFlagBits::eTessellationControlShader) |
  11181. VkFlags(PipelineStageFlagBits::eTessellationEvaluationShader) |
  11182. VkFlags(PipelineStageFlagBits::eGeometryShader) |
  11183. VkFlags(PipelineStageFlagBits::eFragmentShader) |
  11184. VkFlags(PipelineStageFlagBits::eEarlyFragmentTests) |
  11185. VkFlags(PipelineStageFlagBits::eLateFragmentTests) |
  11186. VkFlags(PipelineStageFlagBits::eColorAttachmentOutput) |
  11187. VkFlags(PipelineStageFlagBits::eComputeShader) |
  11188. VkFlags(PipelineStageFlagBits::eTransfer) |
  11189. VkFlags(PipelineStageFlagBits::eBottomOfPipe) |
  11190. VkFlags(PipelineStageFlagBits::eHost) |
  11191. VkFlags(PipelineStageFlagBits::eAllGraphics) |
  11192. VkFlags(PipelineStageFlagBits::eAllCommands) |
  11193. VkFlags(PipelineStageFlagBits::eTransformFeedbackEXT) |
  11194. VkFlags(PipelineStageFlagBits::eConditionalRenderingEXT) |
  11195. VkFlags(PipelineStageFlagBits::eAccelerationStructureBuildKHR) |
  11196. VkFlags(PipelineStageFlagBits::eRayTracingShaderKHR) |
  11197. VkFlags(PipelineStageFlagBits::eShadingRateImageNV) |
  11198. VkFlags(PipelineStageFlagBits::eTaskShaderNV) |
  11199. VkFlags(PipelineStageFlagBits::eMeshShaderNV) |
  11200. VkFlags(PipelineStageFlagBits::eFragmentDensityProcessEXT) |
  11201. VkFlags(PipelineStageFlagBits::eCommandPreprocessNV)
  11202. };
  11203. };
  11204. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator|( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11205. {
  11206. return PipelineStageFlags( bit0 ) | bit1;
  11207. }
  11208. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator&( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11209. {
  11210. return PipelineStageFlags( bit0 ) & bit1;
  11211. }
  11212. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator^( PipelineStageFlagBits bit0, PipelineStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11213. {
  11214. return PipelineStageFlags( bit0 ) ^ bit1;
  11215. }
  11216. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR PipelineStageFlags operator~( PipelineStageFlagBits bits ) VULKAN_HPP_NOEXCEPT
  11217. {
  11218. return ~( PipelineStageFlags( bits ) );
  11219. }
  11220. VULKAN_HPP_INLINE std::string to_string( PipelineStageFlags value )
  11221. {
  11222. if ( !value ) return "{}";
  11223. std::string result;
  11224. if ( value & PipelineStageFlagBits::eTopOfPipe ) result += "TopOfPipe | ";
  11225. if ( value & PipelineStageFlagBits::eDrawIndirect ) result += "DrawIndirect | ";
  11226. if ( value & PipelineStageFlagBits::eVertexInput ) result += "VertexInput | ";
  11227. if ( value & PipelineStageFlagBits::eVertexShader ) result += "VertexShader | ";
  11228. if ( value & PipelineStageFlagBits::eTessellationControlShader ) result += "TessellationControlShader | ";
  11229. if ( value & PipelineStageFlagBits::eTessellationEvaluationShader ) result += "TessellationEvaluationShader | ";
  11230. if ( value & PipelineStageFlagBits::eGeometryShader ) result += "GeometryShader | ";
  11231. if ( value & PipelineStageFlagBits::eFragmentShader ) result += "FragmentShader | ";
  11232. if ( value & PipelineStageFlagBits::eEarlyFragmentTests ) result += "EarlyFragmentTests | ";
  11233. if ( value & PipelineStageFlagBits::eLateFragmentTests ) result += "LateFragmentTests | ";
  11234. if ( value & PipelineStageFlagBits::eColorAttachmentOutput ) result += "ColorAttachmentOutput | ";
  11235. if ( value & PipelineStageFlagBits::eComputeShader ) result += "ComputeShader | ";
  11236. if ( value & PipelineStageFlagBits::eTransfer ) result += "Transfer | ";
  11237. if ( value & PipelineStageFlagBits::eBottomOfPipe ) result += "BottomOfPipe | ";
  11238. if ( value & PipelineStageFlagBits::eHost ) result += "Host | ";
  11239. if ( value & PipelineStageFlagBits::eAllGraphics ) result += "AllGraphics | ";
  11240. if ( value & PipelineStageFlagBits::eAllCommands ) result += "AllCommands | ";
  11241. if ( value & PipelineStageFlagBits::eTransformFeedbackEXT ) result += "TransformFeedbackEXT | ";
  11242. if ( value & PipelineStageFlagBits::eConditionalRenderingEXT ) result += "ConditionalRenderingEXT | ";
  11243. if ( value & PipelineStageFlagBits::eAccelerationStructureBuildKHR ) result += "AccelerationStructureBuildKHR | ";
  11244. if ( value & PipelineStageFlagBits::eRayTracingShaderKHR ) result += "RayTracingShaderKHR | ";
  11245. if ( value & PipelineStageFlagBits::eShadingRateImageNV ) result += "ShadingRateImageNV | ";
  11246. if ( value & PipelineStageFlagBits::eTaskShaderNV ) result += "TaskShaderNV | ";
  11247. if ( value & PipelineStageFlagBits::eMeshShaderNV ) result += "MeshShaderNV | ";
  11248. if ( value & PipelineStageFlagBits::eFragmentDensityProcessEXT ) result += "FragmentDensityProcessEXT | ";
  11249. if ( value & PipelineStageFlagBits::eCommandPreprocessNV ) result += "CommandPreprocessNV | ";
  11250. return "{ " + result.substr(0, result.size() - 3) + " }";
  11251. }
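// Illustrative sketch (hypothetical variables, default vk namespace): a typical use of
// PipelineStageFlags is building the source/destination stage masks for a barrier or a
// queue submission.
//
//   vk::PipelineStageFlags srcStage = vk::PipelineStageFlagBits::eColorAttachmentOutput;
//   vk::PipelineStageFlags dstStage = vk::PipelineStageFlagBits::eFragmentShader |
//                                     vk::PipelineStageFlagBits::eEarlyFragmentTests;
//   // vk::to_string( dstStage ) should read "{ FragmentShader | EarlyFragmentTests }".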
  11252. enum class PipelineTessellationStateCreateFlagBits : VkFlags
  11253. {};
  11254. VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlagBits )
  11255. {
  11256. return "(void)";
  11257. }
  11258. using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits>;
  11259. VULKAN_HPP_INLINE std::string to_string( PipelineTessellationStateCreateFlags )
  11260. {
  11261. return "{}";
  11262. }
  11263. enum class PipelineVertexInputStateCreateFlagBits : VkFlags
  11264. {};
  11265. VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlagBits )
  11266. {
  11267. return "(void)";
  11268. }
  11269. using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits>;
  11270. VULKAN_HPP_INLINE std::string to_string( PipelineVertexInputStateCreateFlags )
  11271. {
  11272. return "{}";
  11273. }
  11274. enum class PipelineViewportStateCreateFlagBits : VkFlags
  11275. {};
  11276. VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlagBits )
  11277. {
  11278. return "(void)";
  11279. }
  11280. using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits>;
  11281. VULKAN_HPP_INLINE std::string to_string( PipelineViewportStateCreateFlags )
  11282. {
  11283. return "{}";
  11284. }
  11285. enum class PipelineViewportSwizzleStateCreateFlagBitsNV : VkFlags
  11286. {};
  11287. VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagBitsNV )
  11288. {
  11289. return "(void)";
  11290. }
  11291. using PipelineViewportSwizzleStateCreateFlagsNV = Flags<PipelineViewportSwizzleStateCreateFlagBitsNV>;
  11292. VULKAN_HPP_INLINE std::string to_string( PipelineViewportSwizzleStateCreateFlagsNV )
  11293. {
  11294. return "{}";
  11295. }
  11296. using PrivateDataSlotCreateFlagsEXT = Flags<PrivateDataSlotCreateFlagBitsEXT>;
  11297. VULKAN_HPP_INLINE std::string to_string( PrivateDataSlotCreateFlagsEXT )
  11298. {
  11299. return "{}";
  11300. }
  11301. using QueryControlFlags = Flags<QueryControlFlagBits>;
  11302. template <> struct FlagTraits<QueryControlFlagBits>
  11303. {
  11304. enum : VkFlags
  11305. {
  11306. allFlags = VkFlags(QueryControlFlagBits::ePrecise)
  11307. };
  11308. };
  11309. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator|( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11310. {
  11311. return QueryControlFlags( bit0 ) | bit1;
  11312. }
  11313. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator&( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11314. {
  11315. return QueryControlFlags( bit0 ) & bit1;
  11316. }
  11317. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator^( QueryControlFlagBits bit0, QueryControlFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11318. {
  11319. return QueryControlFlags( bit0 ) ^ bit1;
  11320. }
  11321. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryControlFlags operator~( QueryControlFlagBits bits ) VULKAN_HPP_NOEXCEPT
  11322. {
  11323. return ~( QueryControlFlags( bits ) );
  11324. }
  11325. VULKAN_HPP_INLINE std::string to_string( QueryControlFlags value )
  11326. {
  11327. if ( !value ) return "{}";
  11328. std::string result;
  11329. if ( value & QueryControlFlagBits::ePrecise ) result += "Precise | ";
  11330. return "{ " + result.substr(0, result.size() - 3) + " }";
  11331. }
  11332. using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits>;
  11333. template <> struct FlagTraits<QueryPipelineStatisticFlagBits>
  11334. {
  11335. enum : VkFlags
  11336. {
  11337. allFlags = VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyVertices) |
  11338. VkFlags(QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives) |
  11339. VkFlags(QueryPipelineStatisticFlagBits::eVertexShaderInvocations) |
  11340. VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderInvocations) |
  11341. VkFlags(QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives) |
  11342. VkFlags(QueryPipelineStatisticFlagBits::eClippingInvocations) |
  11343. VkFlags(QueryPipelineStatisticFlagBits::eClippingPrimitives) |
  11344. VkFlags(QueryPipelineStatisticFlagBits::eFragmentShaderInvocations) |
  11345. VkFlags(QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches) |
  11346. VkFlags(QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations) |
  11347. VkFlags(QueryPipelineStatisticFlagBits::eComputeShaderInvocations)
  11348. };
  11349. };
  11350. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator|( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11351. {
  11352. return QueryPipelineStatisticFlags( bit0 ) | bit1;
  11353. }
  11354. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator&( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11355. {
  11356. return QueryPipelineStatisticFlags( bit0 ) & bit1;
  11357. }
  11358. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator^( QueryPipelineStatisticFlagBits bit0, QueryPipelineStatisticFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11359. {
  11360. return QueryPipelineStatisticFlags( bit0 ) ^ bit1;
  11361. }
  11362. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryPipelineStatisticFlags operator~( QueryPipelineStatisticFlagBits bits ) VULKAN_HPP_NOEXCEPT
  11363. {
  11364. return ~( QueryPipelineStatisticFlags( bits ) );
  11365. }
  11366. VULKAN_HPP_INLINE std::string to_string( QueryPipelineStatisticFlags value )
  11367. {
  11368. if ( !value ) return "{}";
  11369. std::string result;
  11370. if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyVertices ) result += "InputAssemblyVertices | ";
  11371. if ( value & QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives ) result += "InputAssemblyPrimitives | ";
  11372. if ( value & QueryPipelineStatisticFlagBits::eVertexShaderInvocations ) result += "VertexShaderInvocations | ";
  11373. if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderInvocations ) result += "GeometryShaderInvocations | ";
  11374. if ( value & QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives ) result += "GeometryShaderPrimitives | ";
  11375. if ( value & QueryPipelineStatisticFlagBits::eClippingInvocations ) result += "ClippingInvocations | ";
  11376. if ( value & QueryPipelineStatisticFlagBits::eClippingPrimitives ) result += "ClippingPrimitives | ";
  11377. if ( value & QueryPipelineStatisticFlagBits::eFragmentShaderInvocations ) result += "FragmentShaderInvocations | ";
  11378. if ( value & QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches ) result += "TessellationControlShaderPatches | ";
  11379. if ( value & QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations ) result += "TessellationEvaluationShaderInvocations | ";
  11380. if ( value & QueryPipelineStatisticFlagBits::eComputeShaderInvocations ) result += "ComputeShaderInvocations | ";
  11381. return "{ " + result.substr(0, result.size() - 3) + " }";
  11382. }
  11383. using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits>;
  11384. VULKAN_HPP_INLINE std::string to_string( QueryPoolCreateFlags )
  11385. {
  11386. return "{}";
  11387. }
  11388. using QueryResultFlags = Flags<QueryResultFlagBits>;
  11389. template <> struct FlagTraits<QueryResultFlagBits>
  11390. {
  11391. enum : VkFlags
  11392. {
  11393. allFlags = VkFlags(QueryResultFlagBits::e64) |
  11394. VkFlags(QueryResultFlagBits::eWait) |
  11395. VkFlags(QueryResultFlagBits::eWithAvailability) |
  11396. VkFlags(QueryResultFlagBits::ePartial)
  11397. };
  11398. };
  11399. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator|( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11400. {
  11401. return QueryResultFlags( bit0 ) | bit1;
  11402. }
  11403. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator&( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11404. {
  11405. return QueryResultFlags( bit0 ) & bit1;
  11406. }
  11407. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator^( QueryResultFlagBits bit0, QueryResultFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11408. {
  11409. return QueryResultFlags( bit0 ) ^ bit1;
  11410. }
  11411. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueryResultFlags operator~( QueryResultFlagBits bits ) VULKAN_HPP_NOEXCEPT
  11412. {
  11413. return ~( QueryResultFlags( bits ) );
  11414. }
  11415. VULKAN_HPP_INLINE std::string to_string( QueryResultFlags value )
  11416. {
  11417. if ( !value ) return "{}";
  11418. std::string result;
  11419. if ( value & QueryResultFlagBits::e64 ) result += "64 | ";
  11420. if ( value & QueryResultFlagBits::eWait ) result += "Wait | ";
  11421. if ( value & QueryResultFlagBits::eWithAvailability ) result += "WithAvailability | ";
  11422. if ( value & QueryResultFlagBits::ePartial ) result += "Partial | ";
  11423. return "{ " + result.substr(0, result.size() - 3) + " }";
  11424. }
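// Illustrative sketch (hypothetical variable, default vk namespace): QueryResultFlags is
// typically passed when retrieving query pool results; e64 selects 64-bit result slots and
// eWait blocks until the results are available.
//
//   vk::QueryResultFlags resultFlags = vk::QueryResultFlagBits::e64 |
//                                      vk::QueryResultFlagBits::eWait;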
  11425. using QueueFlags = Flags<QueueFlagBits>;
  11426. template <> struct FlagTraits<QueueFlagBits>
  11427. {
  11428. enum : VkFlags
  11429. {
  11430. allFlags = VkFlags(QueueFlagBits::eGraphics) |
  11431. VkFlags(QueueFlagBits::eCompute) |
  11432. VkFlags(QueueFlagBits::eTransfer) |
  11433. VkFlags(QueueFlagBits::eSparseBinding) |
  11434. VkFlags(QueueFlagBits::eProtected)
  11435. };
  11436. };
  11437. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator|( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11438. {
  11439. return QueueFlags( bit0 ) | bit1;
  11440. }
  11441. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator&( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11442. {
  11443. return QueueFlags( bit0 ) & bit1;
  11444. }
  11445. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator^( QueueFlagBits bit0, QueueFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11446. {
  11447. return QueueFlags( bit0 ) ^ bit1;
  11448. }
  11449. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR QueueFlags operator~( QueueFlagBits bits ) VULKAN_HPP_NOEXCEPT
  11450. {
  11451. return ~( QueueFlags( bits ) );
  11452. }
  11453. VULKAN_HPP_INLINE std::string to_string( QueueFlags value )
  11454. {
  11455. if ( !value ) return "{}";
  11456. std::string result;
  11457. if ( value & QueueFlagBits::eGraphics ) result += "Graphics | ";
  11458. if ( value & QueueFlagBits::eCompute ) result += "Compute | ";
  11459. if ( value & QueueFlagBits::eTransfer ) result += "Transfer | ";
  11460. if ( value & QueueFlagBits::eSparseBinding ) result += "SparseBinding | ";
  11461. if ( value & QueueFlagBits::eProtected ) result += "Protected | ";
  11462. return "{ " + result.substr(0, result.size() - 3) + " }";
  11463. }
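// Illustrative sketch (hypothetical 'family', a vk::QueueFamilyProperties): QueueFlags is
// what QueueFamilyProperties::queueFlags reports, so a queue family supporting both graphics
// and compute can be selected with a simple mask test.
//
//   vk::QueueFlags wanted = vk::QueueFlagBits::eGraphics | vk::QueueFlagBits::eCompute;
//   bool suitable = ( family.queueFlags & wanted ) == wanted;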
  11464. using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits>;
  11465. template <> struct FlagTraits<RenderPassCreateFlagBits>
  11466. {
  11467. enum : VkFlags
  11468. {
  11469. allFlags = VkFlags(RenderPassCreateFlagBits::eTransformQCOM)
  11470. };
  11471. };
  11472. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator|( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11473. {
  11474. return RenderPassCreateFlags( bit0 ) | bit1;
  11475. }
  11476. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator&( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11477. {
  11478. return RenderPassCreateFlags( bit0 ) & bit1;
  11479. }
  11480. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator^( RenderPassCreateFlagBits bit0, RenderPassCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11481. {
  11482. return RenderPassCreateFlags( bit0 ) ^ bit1;
  11483. }
  11484. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR RenderPassCreateFlags operator~( RenderPassCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
  11485. {
  11486. return ~( RenderPassCreateFlags( bits ) );
  11487. }
  11488. VULKAN_HPP_INLINE std::string to_string( RenderPassCreateFlags value )
  11489. {
  11490. if ( !value ) return "{}";
  11491. std::string result;
  11492. if ( value & RenderPassCreateFlagBits::eTransformQCOM ) result += "TransformQCOM | ";
  11493. return "{ " + result.substr(0, result.size() - 3) + " }";
  11494. }
  11495. using ResolveModeFlags = Flags<ResolveModeFlagBits>;
  11496. template <> struct FlagTraits<ResolveModeFlagBits>
  11497. {
  11498. enum : VkFlags
  11499. {
  11500. allFlags = VkFlags(ResolveModeFlagBits::eNone) |
  11501. VkFlags(ResolveModeFlagBits::eSampleZero) |
  11502. VkFlags(ResolveModeFlagBits::eAverage) |
  11503. VkFlags(ResolveModeFlagBits::eMin) |
  11504. VkFlags(ResolveModeFlagBits::eMax)
  11505. };
  11506. };
  11507. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator|( ResolveModeFlagBits bit0, ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11508. {
  11509. return ResolveModeFlags( bit0 ) | bit1;
  11510. }
  11511. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator&( ResolveModeFlagBits bit0, ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11512. {
  11513. return ResolveModeFlags( bit0 ) & bit1;
  11514. }
  11515. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator^( ResolveModeFlagBits bit0, ResolveModeFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11516. {
  11517. return ResolveModeFlags( bit0 ) ^ bit1;
  11518. }
  11519. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ResolveModeFlags operator~( ResolveModeFlagBits bits ) VULKAN_HPP_NOEXCEPT
  11520. {
  11521. return ~( ResolveModeFlags( bits ) );
  11522. }
  11523. using ResolveModeFlagsKHR = ResolveModeFlags;
  11524. VULKAN_HPP_INLINE std::string to_string( ResolveModeFlags value )
  11525. {
  11526. if ( !value ) return "{}";
  11527. std::string result;
  11528. if ( value & ResolveModeFlagBits::eSampleZero ) result += "SampleZero | ";
  11529. if ( value & ResolveModeFlagBits::eAverage ) result += "Average | ";
  11530. if ( value & ResolveModeFlagBits::eMin ) result += "Min | ";
  11531. if ( value & ResolveModeFlagBits::eMax ) result += "Max | ";
  11532. return "{ " + result.substr(0, result.size() - 3) + " }";
  11533. }
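// Note: ResolveModeFlagBits::eNone has the value zero, so although it is listed in allFlags
// it can never test true in the to_string() above; an empty mask is reported as "{}" instead.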
  11534. using SampleCountFlags = Flags<SampleCountFlagBits>;
  11535. template <> struct FlagTraits<SampleCountFlagBits>
  11536. {
  11537. enum : VkFlags
  11538. {
  11539. allFlags = VkFlags(SampleCountFlagBits::e1) |
  11540. VkFlags(SampleCountFlagBits::e2) |
  11541. VkFlags(SampleCountFlagBits::e4) |
  11542. VkFlags(SampleCountFlagBits::e8) |
  11543. VkFlags(SampleCountFlagBits::e16) |
  11544. VkFlags(SampleCountFlagBits::e32) |
  11545. VkFlags(SampleCountFlagBits::e64)
  11546. };
  11547. };
  11548. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator|( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11549. {
  11550. return SampleCountFlags( bit0 ) | bit1;
  11551. }
  11552. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator&( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11553. {
  11554. return SampleCountFlags( bit0 ) & bit1;
  11555. }
  11556. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator^( SampleCountFlagBits bit0, SampleCountFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11557. {
  11558. return SampleCountFlags( bit0 ) ^ bit1;
  11559. }
  11560. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SampleCountFlags operator~( SampleCountFlagBits bits ) VULKAN_HPP_NOEXCEPT
  11561. {
  11562. return ~( SampleCountFlags( bits ) );
  11563. }
  11564. VULKAN_HPP_INLINE std::string to_string( SampleCountFlags value )
  11565. {
  11566. if ( !value ) return "{}";
  11567. std::string result;
  11568. if ( value & SampleCountFlagBits::e1 ) result += "1 | ";
  11569. if ( value & SampleCountFlagBits::e2 ) result += "2 | ";
  11570. if ( value & SampleCountFlagBits::e4 ) result += "4 | ";
  11571. if ( value & SampleCountFlagBits::e8 ) result += "8 | ";
  11572. if ( value & SampleCountFlagBits::e16 ) result += "16 | ";
  11573. if ( value & SampleCountFlagBits::e32 ) result += "32 | ";
  11574. if ( value & SampleCountFlagBits::e64 ) result += "64 | ";
  11575. return "{ " + result.substr(0, result.size() - 3) + " }";
  11576. }
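// Illustrative sketch (hypothetical 'limits', a vk::PhysicalDeviceLimits): supported MSAA
// counts are reported as SampleCountFlags, so the counts usable for a combined color/depth
// framebuffer are the intersection of the two masks.
//
//   vk::SampleCountFlags usable = limits.framebufferColorSampleCounts &
//                                 limits.framebufferDepthSampleCounts;
//   bool has8x = static_cast<bool>( usable & vk::SampleCountFlagBits::e8 );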
  11577. using SamplerCreateFlags = Flags<SamplerCreateFlagBits>;
  11578. template <> struct FlagTraits<SamplerCreateFlagBits>
  11579. {
  11580. enum : VkFlags
  11581. {
  11582. allFlags = VkFlags(SamplerCreateFlagBits::eSubsampledEXT) |
  11583. VkFlags(SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT)
  11584. };
  11585. };
  11586. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator|( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11587. {
  11588. return SamplerCreateFlags( bit0 ) | bit1;
  11589. }
  11590. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator&( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11591. {
  11592. return SamplerCreateFlags( bit0 ) & bit1;
  11593. }
  11594. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator^( SamplerCreateFlagBits bit0, SamplerCreateFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11595. {
  11596. return SamplerCreateFlags( bit0 ) ^ bit1;
  11597. }
  11598. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SamplerCreateFlags operator~( SamplerCreateFlagBits bits ) VULKAN_HPP_NOEXCEPT
  11599. {
  11600. return ~( SamplerCreateFlags( bits ) );
  11601. }
  11602. VULKAN_HPP_INLINE std::string to_string( SamplerCreateFlags value )
  11603. {
  11604. if ( !value ) return "{}";
  11605. std::string result;
  11606. if ( value & SamplerCreateFlagBits::eSubsampledEXT ) result += "SubsampledEXT | ";
  11607. if ( value & SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT ) result += "SubsampledCoarseReconstructionEXT | ";
  11608. return "{ " + result.substr(0, result.size() - 3) + " }";
  11609. }
  11610. enum class SemaphoreCreateFlagBits : VkFlags
  11611. {};
  11612. VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlagBits )
  11613. {
  11614. return "(void)";
  11615. }
  11616. using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits>;
  11617. VULKAN_HPP_INLINE std::string to_string( SemaphoreCreateFlags )
  11618. {
  11619. return "{}";
  11620. }
  11621. using SemaphoreImportFlags = Flags<SemaphoreImportFlagBits>;
  11622. template <> struct FlagTraits<SemaphoreImportFlagBits>
  11623. {
  11624. enum : VkFlags
  11625. {
  11626. allFlags = VkFlags(SemaphoreImportFlagBits::eTemporary)
  11627. };
  11628. };
  11629. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator|( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11630. {
  11631. return SemaphoreImportFlags( bit0 ) | bit1;
  11632. }
  11633. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator&( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11634. {
  11635. return SemaphoreImportFlags( bit0 ) & bit1;
  11636. }
  11637. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator^( SemaphoreImportFlagBits bit0, SemaphoreImportFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11638. {
  11639. return SemaphoreImportFlags( bit0 ) ^ bit1;
  11640. }
  11641. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreImportFlags operator~( SemaphoreImportFlagBits bits ) VULKAN_HPP_NOEXCEPT
  11642. {
  11643. return ~( SemaphoreImportFlags( bits ) );
  11644. }
  11645. using SemaphoreImportFlagsKHR = SemaphoreImportFlags;
  11646. VULKAN_HPP_INLINE std::string to_string( SemaphoreImportFlags value )
  11647. {
  11648. if ( !value ) return "{}";
  11649. std::string result;
  11650. if ( value & SemaphoreImportFlagBits::eTemporary ) result += "Temporary | ";
  11651. return "{ " + result.substr(0, result.size() - 3) + " }";
  11652. }
  11653. using SemaphoreWaitFlags = Flags<SemaphoreWaitFlagBits>;
  11654. template <> struct FlagTraits<SemaphoreWaitFlagBits>
  11655. {
  11656. enum : VkFlags
  11657. {
  11658. allFlags = VkFlags(SemaphoreWaitFlagBits::eAny)
  11659. };
  11660. };
  11661. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator|( SemaphoreWaitFlagBits bit0, SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11662. {
  11663. return SemaphoreWaitFlags( bit0 ) | bit1;
  11664. }
  11665. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator&( SemaphoreWaitFlagBits bit0, SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11666. {
  11667. return SemaphoreWaitFlags( bit0 ) & bit1;
  11668. }
  11669. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator^( SemaphoreWaitFlagBits bit0, SemaphoreWaitFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11670. {
  11671. return SemaphoreWaitFlags( bit0 ) ^ bit1;
  11672. }
  11673. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SemaphoreWaitFlags operator~( SemaphoreWaitFlagBits bits ) VULKAN_HPP_NOEXCEPT
  11674. {
  11675. return ~( SemaphoreWaitFlags( bits ) );
  11676. }
  11677. using SemaphoreWaitFlagsKHR = SemaphoreWaitFlags;
  11678. VULKAN_HPP_INLINE std::string to_string( SemaphoreWaitFlags value )
  11679. {
  11680. if ( !value ) return "{}";
  11681. std::string result;
  11682. if ( value & SemaphoreWaitFlagBits::eAny ) result += "Any | ";
  11683. return "{ " + result.substr(0, result.size() - 3) + " }";
  11684. }
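// Illustrative sketch (hypothetical 'waitInfo'): SemaphoreWaitFlagBits::eAny is used with
// timeline semaphore waits so that the wait returns as soon as any one of the listed
// semaphores reaches its value, instead of waiting for all of them.
//
//   vk::SemaphoreWaitInfo waitInfo;
//   waitInfo.flags = vk::SemaphoreWaitFlagBits::eAny;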
  11685. using ShaderCorePropertiesFlagsAMD = Flags<ShaderCorePropertiesFlagBitsAMD>;
  11686. VULKAN_HPP_INLINE std::string to_string( ShaderCorePropertiesFlagsAMD )
  11687. {
  11688. return "{}";
  11689. }
  11690. using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits>;
  11691. VULKAN_HPP_INLINE std::string to_string( ShaderModuleCreateFlags )
  11692. {
  11693. return "{}";
  11694. }
  11695. using ShaderStageFlags = Flags<ShaderStageFlagBits>;
  11696. template <> struct FlagTraits<ShaderStageFlagBits>
  11697. {
  11698. enum : VkFlags
  11699. {
  11700. allFlags = VkFlags(ShaderStageFlagBits::eVertex) |
  11701. VkFlags(ShaderStageFlagBits::eTessellationControl) |
  11702. VkFlags(ShaderStageFlagBits::eTessellationEvaluation) |
  11703. VkFlags(ShaderStageFlagBits::eGeometry) |
  11704. VkFlags(ShaderStageFlagBits::eFragment) |
  11705. VkFlags(ShaderStageFlagBits::eCompute) |
  11706. VkFlags(ShaderStageFlagBits::eAllGraphics) |
  11707. VkFlags(ShaderStageFlagBits::eAll) |
  11708. VkFlags(ShaderStageFlagBits::eRaygenKHR) |
  11709. VkFlags(ShaderStageFlagBits::eAnyHitKHR) |
  11710. VkFlags(ShaderStageFlagBits::eClosestHitKHR) |
  11711. VkFlags(ShaderStageFlagBits::eMissKHR) |
  11712. VkFlags(ShaderStageFlagBits::eIntersectionKHR) |
  11713. VkFlags(ShaderStageFlagBits::eCallableKHR) |
  11714. VkFlags(ShaderStageFlagBits::eTaskNV) |
  11715. VkFlags(ShaderStageFlagBits::eMeshNV)
  11716. };
  11717. };
  11718. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator|( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11719. {
  11720. return ShaderStageFlags( bit0 ) | bit1;
  11721. }
  11722. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator&( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11723. {
  11724. return ShaderStageFlags( bit0 ) & bit1;
  11725. }
  11726. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator^( ShaderStageFlagBits bit0, ShaderStageFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11727. {
  11728. return ShaderStageFlags( bit0 ) ^ bit1;
  11729. }
  11730. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ShaderStageFlags operator~( ShaderStageFlagBits bits ) VULKAN_HPP_NOEXCEPT
  11731. {
  11732. return ~( ShaderStageFlags( bits ) );
  11733. }
  11734. VULKAN_HPP_INLINE std::string to_string( ShaderStageFlags value )
  11735. {
  11736. if ( !value ) return "{}";
  11737. std::string result;
  11738. if ( value & ShaderStageFlagBits::eVertex ) result += "Vertex | ";
  11739. if ( value & ShaderStageFlagBits::eTessellationControl ) result += "TessellationControl | ";
  11740. if ( value & ShaderStageFlagBits::eTessellationEvaluation ) result += "TessellationEvaluation | ";
  11741. if ( value & ShaderStageFlagBits::eGeometry ) result += "Geometry | ";
  11742. if ( value & ShaderStageFlagBits::eFragment ) result += "Fragment | ";
  11743. if ( value & ShaderStageFlagBits::eCompute ) result += "Compute | ";
  11744. if ( value & ShaderStageFlagBits::eRaygenKHR ) result += "RaygenKHR | ";
  11745. if ( value & ShaderStageFlagBits::eAnyHitKHR ) result += "AnyHitKHR | ";
  11746. if ( value & ShaderStageFlagBits::eClosestHitKHR ) result += "ClosestHitKHR | ";
  11747. if ( value & ShaderStageFlagBits::eMissKHR ) result += "MissKHR | ";
  11748. if ( value & ShaderStageFlagBits::eIntersectionKHR ) result += "IntersectionKHR | ";
  11749. if ( value & ShaderStageFlagBits::eCallableKHR ) result += "CallableKHR | ";
  11750. if ( value & ShaderStageFlagBits::eTaskNV ) result += "TaskNV | ";
  11751. if ( value & ShaderStageFlagBits::eMeshNV ) result += "MeshNV | ";
  11752. return "{ " + result.substr(0, result.size() - 3) + " }";
  11753. }
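// Illustrative sketch (hypothetical 'binding'): ShaderStageFlags commonly describes which
// pipeline stages may access a descriptor binding or a push-constant range.
//
//   vk::DescriptorSetLayoutBinding binding;
//   binding.stageFlags = vk::ShaderStageFlagBits::eVertex |
//                        vk::ShaderStageFlagBits::eFragment;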
  11754. using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits>;
  11755. template <> struct FlagTraits<SparseImageFormatFlagBits>
  11756. {
  11757. enum : VkFlags
  11758. {
  11759. allFlags = VkFlags(SparseImageFormatFlagBits::eSingleMiptail) |
  11760. VkFlags(SparseImageFormatFlagBits::eAlignedMipSize) |
  11761. VkFlags(SparseImageFormatFlagBits::eNonstandardBlockSize)
  11762. };
  11763. };
  11764. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator|( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11765. {
  11766. return SparseImageFormatFlags( bit0 ) | bit1;
  11767. }
  11768. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator&( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11769. {
  11770. return SparseImageFormatFlags( bit0 ) & bit1;
  11771. }
  11772. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator^( SparseImageFormatFlagBits bit0, SparseImageFormatFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11773. {
  11774. return SparseImageFormatFlags( bit0 ) ^ bit1;
  11775. }
  11776. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseImageFormatFlags operator~( SparseImageFormatFlagBits bits ) VULKAN_HPP_NOEXCEPT
  11777. {
  11778. return ~( SparseImageFormatFlags( bits ) );
  11779. }
  11780. VULKAN_HPP_INLINE std::string to_string( SparseImageFormatFlags value )
  11781. {
  11782. if ( !value ) return "{}";
  11783. std::string result;
  11784. if ( value & SparseImageFormatFlagBits::eSingleMiptail ) result += "SingleMiptail | ";
  11785. if ( value & SparseImageFormatFlagBits::eAlignedMipSize ) result += "AlignedMipSize | ";
  11786. if ( value & SparseImageFormatFlagBits::eNonstandardBlockSize ) result += "NonstandardBlockSize | ";
  11787. return "{ " + result.substr(0, result.size() - 3) + " }";
  11788. }
  11789. using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits>;
  11790. template <> struct FlagTraits<SparseMemoryBindFlagBits>
  11791. {
  11792. enum : VkFlags
  11793. {
  11794. allFlags = VkFlags(SparseMemoryBindFlagBits::eMetadata)
  11795. };
  11796. };
  11797. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator|( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11798. {
  11799. return SparseMemoryBindFlags( bit0 ) | bit1;
  11800. }
  11801. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator&( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11802. {
  11803. return SparseMemoryBindFlags( bit0 ) & bit1;
  11804. }
  11805. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator^( SparseMemoryBindFlagBits bit0, SparseMemoryBindFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11806. {
  11807. return SparseMemoryBindFlags( bit0 ) ^ bit1;
  11808. }
  11809. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SparseMemoryBindFlags operator~( SparseMemoryBindFlagBits bits ) VULKAN_HPP_NOEXCEPT
  11810. {
  11811. return ~( SparseMemoryBindFlags( bits ) );
  11812. }
  11813. VULKAN_HPP_INLINE std::string to_string( SparseMemoryBindFlags value )
  11814. {
  11815. if ( !value ) return "{}";
  11816. std::string result;
  11817. if ( value & SparseMemoryBindFlagBits::eMetadata ) result += "Metadata | ";
  11818. return "{ " + result.substr(0, result.size() - 3) + " }";
  11819. }
  11820. using StencilFaceFlags = Flags<StencilFaceFlagBits>;
  11821. template <> struct FlagTraits<StencilFaceFlagBits>
  11822. {
  11823. enum : VkFlags
  11824. {
  11825. allFlags = VkFlags(StencilFaceFlagBits::eFront) |
  11826. VkFlags(StencilFaceFlagBits::eBack) |
  11827. VkFlags(StencilFaceFlagBits::eFrontAndBack)
  11828. };
  11829. };
  11830. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator|( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11831. {
  11832. return StencilFaceFlags( bit0 ) | bit1;
  11833. }
  11834. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator&( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11835. {
  11836. return StencilFaceFlags( bit0 ) & bit1;
  11837. }
  11838. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator^( StencilFaceFlagBits bit0, StencilFaceFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11839. {
  11840. return StencilFaceFlags( bit0 ) ^ bit1;
  11841. }
  11842. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR StencilFaceFlags operator~( StencilFaceFlagBits bits ) VULKAN_HPP_NOEXCEPT
  11843. {
  11844. return ~( StencilFaceFlags( bits ) );
  11845. }
  11846. VULKAN_HPP_INLINE std::string to_string( StencilFaceFlags value )
  11847. {
  11848. if ( !value ) return "{}";
  11849. std::string result;
  11850. if ( value & StencilFaceFlagBits::eFront ) result += "Front | ";
  11851. if ( value & StencilFaceFlagBits::eBack ) result += "Back | ";
  11852. return "{ " + result.substr(0, result.size() - 3) + " }";
  11853. }
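// Note: StencilFaceFlagBits::eFrontAndBack is the combination of eFront and eBack, so the
// to_string() above prints it as "{ Front | Back }" rather than under a separate name.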
  11854. #ifdef VK_USE_PLATFORM_GGP
  11855. enum class StreamDescriptorSurfaceCreateFlagBitsGGP : VkFlags
  11856. {};
  11857. VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagBitsGGP )
  11858. {
  11859. return "(void)";
  11860. }
  11861. using StreamDescriptorSurfaceCreateFlagsGGP = Flags<StreamDescriptorSurfaceCreateFlagBitsGGP>;
  11862. VULKAN_HPP_INLINE std::string to_string( StreamDescriptorSurfaceCreateFlagsGGP )
  11863. {
  11864. return "{}";
  11865. }
  11866. #endif /*VK_USE_PLATFORM_GGP*/
  11867. using SubgroupFeatureFlags = Flags<SubgroupFeatureFlagBits>;
  11868. template <> struct FlagTraits<SubgroupFeatureFlagBits>
  11869. {
  11870. enum : VkFlags
  11871. {
  11872. allFlags = VkFlags(SubgroupFeatureFlagBits::eBasic) |
  11873. VkFlags(SubgroupFeatureFlagBits::eVote) |
  11874. VkFlags(SubgroupFeatureFlagBits::eArithmetic) |
  11875. VkFlags(SubgroupFeatureFlagBits::eBallot) |
  11876. VkFlags(SubgroupFeatureFlagBits::eShuffle) |
  11877. VkFlags(SubgroupFeatureFlagBits::eShuffleRelative) |
  11878. VkFlags(SubgroupFeatureFlagBits::eClustered) |
  11879. VkFlags(SubgroupFeatureFlagBits::eQuad) |
  11880. VkFlags(SubgroupFeatureFlagBits::ePartitionedNV)
  11881. };
  11882. };
  11883. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator|( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11884. {
  11885. return SubgroupFeatureFlags( bit0 ) | bit1;
  11886. }
  11887. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator&( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11888. {
  11889. return SubgroupFeatureFlags( bit0 ) & bit1;
  11890. }
  11891. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator^( SubgroupFeatureFlagBits bit0, SubgroupFeatureFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11892. {
  11893. return SubgroupFeatureFlags( bit0 ) ^ bit1;
  11894. }
  11895. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubgroupFeatureFlags operator~( SubgroupFeatureFlagBits bits ) VULKAN_HPP_NOEXCEPT
  11896. {
  11897. return ~( SubgroupFeatureFlags( bits ) );
  11898. }
  11899. VULKAN_HPP_INLINE std::string to_string( SubgroupFeatureFlags value )
  11900. {
  11901. if ( !value ) return "{}";
  11902. std::string result;
  11903. if ( value & SubgroupFeatureFlagBits::eBasic ) result += "Basic | ";
  11904. if ( value & SubgroupFeatureFlagBits::eVote ) result += "Vote | ";
  11905. if ( value & SubgroupFeatureFlagBits::eArithmetic ) result += "Arithmetic | ";
  11906. if ( value & SubgroupFeatureFlagBits::eBallot ) result += "Ballot | ";
  11907. if ( value & SubgroupFeatureFlagBits::eShuffle ) result += "Shuffle | ";
  11908. if ( value & SubgroupFeatureFlagBits::eShuffleRelative ) result += "ShuffleRelative | ";
  11909. if ( value & SubgroupFeatureFlagBits::eClustered ) result += "Clustered | ";
  11910. if ( value & SubgroupFeatureFlagBits::eQuad ) result += "Quad | ";
  11911. if ( value & SubgroupFeatureFlagBits::ePartitionedNV ) result += "PartitionedNV | ";
  11912. return "{ " + result.substr(0, result.size() - 3) + " }";
  11913. }
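// Illustrative sketch (hypothetical 'subgroupProps', a vk::PhysicalDeviceSubgroupProperties):
// supported subgroup operation classes are reported as SubgroupFeatureFlags, so a particular
// class can be tested with a mask.
//
//   bool hasBallot = static_cast<bool>( subgroupProps.supportedOperations &
//                                       vk::SubgroupFeatureFlagBits::eBallot );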
  11914. using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits>;
  11915. template <> struct FlagTraits<SubpassDescriptionFlagBits>
  11916. {
  11917. enum : VkFlags
  11918. {
  11919. allFlags = VkFlags(SubpassDescriptionFlagBits::ePerViewAttributesNVX) |
  11920. VkFlags(SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX) |
  11921. VkFlags(SubpassDescriptionFlagBits::eFragmentRegionQCOM) |
  11922. VkFlags(SubpassDescriptionFlagBits::eShaderResolveQCOM)
  11923. };
  11924. };
  11925. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator|( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11926. {
  11927. return SubpassDescriptionFlags( bit0 ) | bit1;
  11928. }
  11929. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator&( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11930. {
  11931. return SubpassDescriptionFlags( bit0 ) & bit1;
  11932. }
  11933. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator^( SubpassDescriptionFlagBits bit0, SubpassDescriptionFlagBits bit1 ) VULKAN_HPP_NOEXCEPT
  11934. {
  11935. return SubpassDescriptionFlags( bit0 ) ^ bit1;
  11936. }
  11937. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SubpassDescriptionFlags operator~( SubpassDescriptionFlagBits bits ) VULKAN_HPP_NOEXCEPT
  11938. {
  11939. return ~( SubpassDescriptionFlags( bits ) );
  11940. }
  11941. VULKAN_HPP_INLINE std::string to_string( SubpassDescriptionFlags value )
  11942. {
  11943. if ( !value ) return "{}";
  11944. std::string result;
  11945. if ( value & SubpassDescriptionFlagBits::ePerViewAttributesNVX ) result += "PerViewAttributesNVX | ";
  11946. if ( value & SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX ) result += "PerViewPositionXOnlyNVX | ";
  11947. if ( value & SubpassDescriptionFlagBits::eFragmentRegionQCOM ) result += "FragmentRegionQCOM | ";
  11948. if ( value & SubpassDescriptionFlagBits::eShaderResolveQCOM ) result += "ShaderResolveQCOM | ";
  11949. return "{ " + result.substr(0, result.size() - 3) + " }";
  11950. }
  11951. using SurfaceCounterFlagsEXT = Flags<SurfaceCounterFlagBitsEXT>;
  11952. template <> struct FlagTraits<SurfaceCounterFlagBitsEXT>
  11953. {
  11954. enum : VkFlags
  11955. {
  11956. allFlags = VkFlags(SurfaceCounterFlagBitsEXT::eVblank)
  11957. };
  11958. };
  11959. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator|( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  11960. {
  11961. return SurfaceCounterFlagsEXT( bit0 ) | bit1;
  11962. }
  11963. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator&( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  11964. {
  11965. return SurfaceCounterFlagsEXT( bit0 ) & bit1;
  11966. }
  11967. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator^( SurfaceCounterFlagBitsEXT bit0, SurfaceCounterFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  11968. {
  11969. return SurfaceCounterFlagsEXT( bit0 ) ^ bit1;
  11970. }
  11971. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceCounterFlagsEXT operator~( SurfaceCounterFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
  11972. {
  11973. return ~( SurfaceCounterFlagsEXT( bits ) );
  11974. }
  11975. VULKAN_HPP_INLINE std::string to_string( SurfaceCounterFlagsEXT value )
  11976. {
  11977. if ( !value ) return "{}";
  11978. std::string result;
  11979. if ( value & SurfaceCounterFlagBitsEXT::eVblank ) result += "Vblank | ";
  11980. return "{ " + result.substr(0, result.size() - 3) + " }";
  11981. }
  11982. using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR>;
  11983. template <> struct FlagTraits<SurfaceTransformFlagBitsKHR>
  11984. {
  11985. enum : VkFlags
  11986. {
  11987. allFlags = VkFlags(SurfaceTransformFlagBitsKHR::eIdentity) |
  11988. VkFlags(SurfaceTransformFlagBitsKHR::eRotate90) |
  11989. VkFlags(SurfaceTransformFlagBitsKHR::eRotate180) |
  11990. VkFlags(SurfaceTransformFlagBitsKHR::eRotate270) |
  11991. VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirror) |
  11992. VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90) |
  11993. VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180) |
  11994. VkFlags(SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270) |
  11995. VkFlags(SurfaceTransformFlagBitsKHR::eInherit)
  11996. };
  11997. };
  11998. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator|( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  11999. {
  12000. return SurfaceTransformFlagsKHR( bit0 ) | bit1;
  12001. }
  12002. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator&( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  12003. {
  12004. return SurfaceTransformFlagsKHR( bit0 ) & bit1;
  12005. }
  12006. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator^( SurfaceTransformFlagBitsKHR bit0, SurfaceTransformFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  12007. {
  12008. return SurfaceTransformFlagsKHR( bit0 ) ^ bit1;
  12009. }
  12010. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SurfaceTransformFlagsKHR operator~( SurfaceTransformFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
  12011. {
  12012. return ~( SurfaceTransformFlagsKHR( bits ) );
  12013. }
  12014. VULKAN_HPP_INLINE std::string to_string( SurfaceTransformFlagsKHR value )
  12015. {
  12016. if ( !value ) return "{}";
  12017. std::string result;
  12018. if ( value & SurfaceTransformFlagBitsKHR::eIdentity ) result += "Identity | ";
  12019. if ( value & SurfaceTransformFlagBitsKHR::eRotate90 ) result += "Rotate90 | ";
  12020. if ( value & SurfaceTransformFlagBitsKHR::eRotate180 ) result += "Rotate180 | ";
  12021. if ( value & SurfaceTransformFlagBitsKHR::eRotate270 ) result += "Rotate270 | ";
  12022. if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirror ) result += "HorizontalMirror | ";
  12023. if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90 ) result += "HorizontalMirrorRotate90 | ";
  12024. if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180 ) result += "HorizontalMirrorRotate180 | ";
  12025. if ( value & SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270 ) result += "HorizontalMirrorRotate270 | ";
  12026. if ( value & SurfaceTransformFlagBitsKHR::eInherit ) result += "Inherit | ";
  12027. return "{ " + result.substr(0, result.size() - 3) + " }";
  12028. }
  12029. using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR>;
  12030. template <> struct FlagTraits<SwapchainCreateFlagBitsKHR>
  12031. {
  12032. enum : VkFlags
  12033. {
  12034. allFlags = VkFlags(SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions) |
  12035. VkFlags(SwapchainCreateFlagBitsKHR::eProtected) |
  12036. VkFlags(SwapchainCreateFlagBitsKHR::eMutableFormat)
  12037. };
  12038. };
  12039. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator|( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  12040. {
  12041. return SwapchainCreateFlagsKHR( bit0 ) | bit1;
  12042. }
  12043. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator&( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  12044. {
  12045. return SwapchainCreateFlagsKHR( bit0 ) & bit1;
  12046. }
  12047. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator^( SwapchainCreateFlagBitsKHR bit0, SwapchainCreateFlagBitsKHR bit1 ) VULKAN_HPP_NOEXCEPT
  12048. {
  12049. return SwapchainCreateFlagsKHR( bit0 ) ^ bit1;
  12050. }
  12051. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR SwapchainCreateFlagsKHR operator~( SwapchainCreateFlagBitsKHR bits ) VULKAN_HPP_NOEXCEPT
  12052. {
  12053. return ~( SwapchainCreateFlagsKHR( bits ) );
  12054. }
  12055. VULKAN_HPP_INLINE std::string to_string( SwapchainCreateFlagsKHR value )
  12056. {
  12057. if ( !value ) return "{}";
  12058. std::string result;
  12059. if ( value & SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions ) result += "SplitInstanceBindRegions | ";
  12060. if ( value & SwapchainCreateFlagBitsKHR::eProtected ) result += "Protected | ";
  12061. if ( value & SwapchainCreateFlagBitsKHR::eMutableFormat ) result += "MutableFormat | ";
  12062. return "{ " + result.substr(0, result.size() - 3) + " }";
  12063. }
  12064. using ToolPurposeFlagsEXT = Flags<ToolPurposeFlagBitsEXT>;
  12065. template <> struct FlagTraits<ToolPurposeFlagBitsEXT>
  12066. {
  12067. enum : VkFlags
  12068. {
  12069. allFlags = VkFlags(ToolPurposeFlagBitsEXT::eValidation) |
  12070. VkFlags(ToolPurposeFlagBitsEXT::eProfiling) |
  12071. VkFlags(ToolPurposeFlagBitsEXT::eTracing) |
  12072. VkFlags(ToolPurposeFlagBitsEXT::eAdditionalFeatures) |
  12073. VkFlags(ToolPurposeFlagBitsEXT::eModifyingFeatures) |
  12074. VkFlags(ToolPurposeFlagBitsEXT::eDebugReporting) |
  12075. VkFlags(ToolPurposeFlagBitsEXT::eDebugMarkers)
  12076. };
  12077. };
  12078. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator|( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  12079. {
  12080. return ToolPurposeFlagsEXT( bit0 ) | bit1;
  12081. }
  12082. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator&( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  12083. {
  12084. return ToolPurposeFlagsEXT( bit0 ) & bit1;
  12085. }
  12086. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator^( ToolPurposeFlagBitsEXT bit0, ToolPurposeFlagBitsEXT bit1 ) VULKAN_HPP_NOEXCEPT
  12087. {
  12088. return ToolPurposeFlagsEXT( bit0 ) ^ bit1;
  12089. }
  12090. VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR ToolPurposeFlagsEXT operator~( ToolPurposeFlagBitsEXT bits ) VULKAN_HPP_NOEXCEPT
  12091. {
  12092. return ~( ToolPurposeFlagsEXT( bits ) );
  12093. }
  12094. VULKAN_HPP_INLINE std::string to_string( ToolPurposeFlagsEXT value )
  12095. {
  12096. if ( !value ) return "{}";
  12097. std::string result;
  12098. if ( value & ToolPurposeFlagBitsEXT::eValidation ) result += "Validation | ";
  12099. if ( value & ToolPurposeFlagBitsEXT::eProfiling ) result += "Profiling | ";
  12100. if ( value & ToolPurposeFlagBitsEXT::eTracing ) result += "Tracing | ";
  12101. if ( value & ToolPurposeFlagBitsEXT::eAdditionalFeatures ) result += "AdditionalFeatures | ";
  12102. if ( value & ToolPurposeFlagBitsEXT::eModifyingFeatures ) result += "ModifyingFeatures | ";
  12103. if ( value & ToolPurposeFlagBitsEXT::eDebugReporting ) result += "DebugReporting | ";
  12104. if ( value & ToolPurposeFlagBitsEXT::eDebugMarkers ) result += "DebugMarkers | ";
  12105. return "{ " + result.substr(0, result.size() - 3) + " }";
  12106. }
  12107. enum class ValidationCacheCreateFlagBitsEXT : VkFlags
  12108. {};
  12109. VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagBitsEXT )
  12110. {
  12111. return "(void)";
  12112. }
  12113. using ValidationCacheCreateFlagsEXT = Flags<ValidationCacheCreateFlagBitsEXT>;
  12114. VULKAN_HPP_INLINE std::string to_string( ValidationCacheCreateFlagsEXT )
  12115. {
  12116. return "{}";
  12117. }
  12118. #ifdef VK_USE_PLATFORM_VI_NN
  12119. enum class ViSurfaceCreateFlagBitsNN : VkFlags
  12120. {};
  12121. VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagBitsNN )
  12122. {
  12123. return "(void)";
  12124. }
  12125. using ViSurfaceCreateFlagsNN = Flags<ViSurfaceCreateFlagBitsNN>;
  12126. VULKAN_HPP_INLINE std::string to_string( ViSurfaceCreateFlagsNN )
  12127. {
  12128. return "{}";
  12129. }
  12130. #endif /*VK_USE_PLATFORM_VI_NN*/
  12131. #ifdef VK_USE_PLATFORM_WAYLAND_KHR
  12132. enum class WaylandSurfaceCreateFlagBitsKHR : VkFlags
  12133. {};
  12134. VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagBitsKHR )
  12135. {
  12136. return "(void)";
  12137. }
  12138. using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR>;
  12139. VULKAN_HPP_INLINE std::string to_string( WaylandSurfaceCreateFlagsKHR )
  12140. {
  12141. return "{}";
  12142. }
  12143. #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
  12144. #ifdef VK_USE_PLATFORM_WIN32_KHR
  12145. enum class Win32SurfaceCreateFlagBitsKHR : VkFlags
  12146. {};
  12147. VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagBitsKHR )
  12148. {
  12149. return "(void)";
  12150. }
  12151. using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR>;
  12152. VULKAN_HPP_INLINE std::string to_string( Win32SurfaceCreateFlagsKHR )
  12153. {
  12154. return "{}";
  12155. }
  12156. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  12157. #ifdef VK_USE_PLATFORM_XCB_KHR
  12158. enum class XcbSurfaceCreateFlagBitsKHR : VkFlags
  12159. {};
  12160. VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagBitsKHR )
  12161. {
  12162. return "(void)";
  12163. }
  12164. using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR>;
  12165. VULKAN_HPP_INLINE std::string to_string( XcbSurfaceCreateFlagsKHR )
  12166. {
  12167. return "{}";
  12168. }
  12169. #endif /*VK_USE_PLATFORM_XCB_KHR*/
  12170. #ifdef VK_USE_PLATFORM_XLIB_KHR
  12171. enum class XlibSurfaceCreateFlagBitsKHR : VkFlags
  12172. {};
  12173. VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagBitsKHR )
  12174. {
  12175. return "(void)";
  12176. }
  12177. using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR>;
  12178. VULKAN_HPP_INLINE std::string to_string( XlibSurfaceCreateFlagsKHR )
  12179. {
  12180. return "{}";
  12181. }
  12182. #endif /*VK_USE_PLATFORM_XLIB_KHR*/
  12183. } // namespace VULKAN_HPP_NAMESPACE
  12184. #ifndef VULKAN_HPP_NO_EXCEPTIONS
  12185. namespace std
  12186. {
  12187. template <>
  12188. struct is_error_code_enum<VULKAN_HPP_NAMESPACE::Result> : public true_type
  12189. {};
  12190. }
  12191. #endif
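// The std::is_error_code_enum specialization above lets a vk::Result convert implicitly into
// a std::error_code (via the make_error_code overload defined below), which is what the
// SystemError exceptions and any <system_error>-based handling build on.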
  12192. namespace VULKAN_HPP_NAMESPACE
  12193. {
  12194. #ifndef VULKAN_HPP_NO_EXCEPTIONS
  12195. class ErrorCategoryImpl : public std::error_category
  12196. {
  12197. public:
  12198. virtual const char* name() const VULKAN_HPP_NOEXCEPT override { return VULKAN_HPP_NAMESPACE_STRING"::Result"; }
  12199. virtual std::string message(int ev) const override { return to_string(static_cast<Result>(ev)); }
  12200. };
  12201. class Error
  12202. {
  12203. public:
  12204. Error() VULKAN_HPP_NOEXCEPT = default;
  12205. Error(const Error&) VULKAN_HPP_NOEXCEPT = default;
  12206. virtual ~Error() VULKAN_HPP_NOEXCEPT = default;
  12207. virtual const char* what() const VULKAN_HPP_NOEXCEPT = 0;
  12208. };
  12209. class LogicError : public Error, public std::logic_error
  12210. {
  12211. public:
  12212. explicit LogicError( const std::string& what )
  12213. : Error(), std::logic_error(what) {}
  12214. explicit LogicError( char const * what )
  12215. : Error(), std::logic_error(what) {}
  12216. virtual const char* what() const VULKAN_HPP_NOEXCEPT { return std::logic_error::what(); }
  12217. };
  12218. class SystemError : public Error, public std::system_error
  12219. {
  12220. public:
  12221. SystemError( std::error_code ec )
  12222. : Error(), std::system_error(ec) {}
  12223. SystemError( std::error_code ec, std::string const& what )
  12224. : Error(), std::system_error(ec, what) {}
  12225. SystemError( std::error_code ec, char const * what )
  12226. : Error(), std::system_error(ec, what) {}
  12227. SystemError( int ev, std::error_category const& ecat )
  12228. : Error(), std::system_error(ev, ecat) {}
  12229. SystemError( int ev, std::error_category const& ecat, std::string const& what)
  12230. : Error(), std::system_error(ev, ecat, what) {}
  12231. SystemError( int ev, std::error_category const& ecat, char const * what)
  12232. : Error(), std::system_error(ev, ecat, what) {}
  12233. virtual const char* what() const VULKAN_HPP_NOEXCEPT { return std::system_error::what(); }
  12234. };
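// Illustrative sketch (not part of the generated header): when exceptions are enabled,
// failing calls throw one of the SystemError subclasses defined below, so a caller can handle
// Vulkan failures either by concrete type or by inspecting the stored error code. The body of
// the try block is a placeholder; std::cerr assumes <iostream>.
//
//   try
//   {
//     // ... some vk::Instance / vk::Device call ...
//   }
//   catch ( vk::SystemError const & err )
//   {
//     std::cerr << "Vulkan call failed: " << err.what() << "\n";
//   }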
  12235. VULKAN_HPP_INLINE const std::error_category& errorCategory() VULKAN_HPP_NOEXCEPT
  12236. {
  12237. static ErrorCategoryImpl instance;
  12238. return instance;
  12239. }
  12240. VULKAN_HPP_INLINE std::error_code make_error_code(Result e) VULKAN_HPP_NOEXCEPT
  12241. {
  12242. return std::error_code(static_cast<int>(e), errorCategory());
  12243. }
  12244. VULKAN_HPP_INLINE std::error_condition make_error_condition(Result e) VULKAN_HPP_NOEXCEPT
  12245. {
  12246. return std::error_condition(static_cast<int>(e), errorCategory());
  12247. }
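// Illustrative sketch (hypothetical 'result', a vk::Result): because vk::Result is registered
// as an error code enum, failures can also be stored and compared through <system_error>
// without exceptions.
//
//   std::error_code ec = vk::make_error_code( result );
//   if ( ec == vk::Result::eErrorDeviceLost ) { /* handle device loss */ }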
  12248. class OutOfHostMemoryError : public SystemError
  12249. {
  12250. public:
  12251. OutOfHostMemoryError( std::string const& message )
  12252. : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
  12253. OutOfHostMemoryError( char const * message )
  12254. : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
  12255. };
  12256. class OutOfDeviceMemoryError : public SystemError
  12257. {
  12258. public:
  12259. OutOfDeviceMemoryError( std::string const& message )
  12260. : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
  12261. OutOfDeviceMemoryError( char const * message )
  12262. : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
  12263. };
  12264. class InitializationFailedError : public SystemError
  12265. {
  12266. public:
  12267. InitializationFailedError( std::string const& message )
  12268. : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
  12269. InitializationFailedError( char const * message )
  12270. : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
  12271. };
  12272. class DeviceLostError : public SystemError
  12273. {
  12274. public:
  12275. DeviceLostError( std::string const& message )
  12276. : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
  12277. DeviceLostError( char const * message )
  12278. : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
  12279. };
  12280. class MemoryMapFailedError : public SystemError
  12281. {
  12282. public:
  12283. MemoryMapFailedError( std::string const& message )
  12284. : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
  12285. MemoryMapFailedError( char const * message )
  12286. : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
  12287. };
  12288. class LayerNotPresentError : public SystemError
  12289. {
  12290. public:
  12291. LayerNotPresentError( std::string const& message )
  12292. : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
  12293. LayerNotPresentError( char const * message )
  12294. : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
  12295. };
  12296. class ExtensionNotPresentError : public SystemError
  12297. {
  12298. public:
  12299. ExtensionNotPresentError( std::string const& message )
  12300. : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
  12301. ExtensionNotPresentError( char const * message )
  12302. : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
  12303. };
  12304. class FeatureNotPresentError : public SystemError
  12305. {
  12306. public:
  12307. FeatureNotPresentError( std::string const& message )
  12308. : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
  12309. FeatureNotPresentError( char const * message )
  12310. : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
  12311. };
  12312. class IncompatibleDriverError : public SystemError
  12313. {
  12314. public:
  12315. IncompatibleDriverError( std::string const& message )
  12316. : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
  12317. IncompatibleDriverError( char const * message )
  12318. : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
  12319. };
  12320. class TooManyObjectsError : public SystemError
  12321. {
  12322. public:
  12323. TooManyObjectsError( std::string const& message )
  12324. : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
  12325. TooManyObjectsError( char const * message )
  12326. : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
  12327. };
  12328. class FormatNotSupportedError : public SystemError
  12329. {
  12330. public:
  12331. FormatNotSupportedError( std::string const& message )
  12332. : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
  12333. FormatNotSupportedError( char const * message )
  12334. : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
  12335. };
  12336. class FragmentedPoolError : public SystemError
  12337. {
  12338. public:
  12339. FragmentedPoolError( std::string const& message )
  12340. : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
  12341. FragmentedPoolError( char const * message )
  12342. : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
  12343. };
  12344. class UnknownError : public SystemError
  12345. {
  12346. public:
  12347. UnknownError( std::string const& message )
  12348. : SystemError( make_error_code( Result::eErrorUnknown ), message ) {}
  12349. UnknownError( char const * message )
  12350. : SystemError( make_error_code( Result::eErrorUnknown ), message ) {}
  12351. };
  12352. class OutOfPoolMemoryError : public SystemError
  12353. {
  12354. public:
  12355. OutOfPoolMemoryError( std::string const& message )
  12356. : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {}
  12357. OutOfPoolMemoryError( char const * message )
  12358. : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {}
  12359. };
  12360. class InvalidExternalHandleError : public SystemError
  12361. {
  12362. public:
  12363. InvalidExternalHandleError( std::string const& message )
  12364. : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {}
  12365. InvalidExternalHandleError( char const * message )
  12366. : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {}
  12367. };
  12368. class FragmentationError : public SystemError
  12369. {
  12370. public:
  12371. FragmentationError( std::string const& message )
  12372. : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {}
  12373. FragmentationError( char const * message )
  12374. : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {}
  12375. };
  12376. class InvalidOpaqueCaptureAddressError : public SystemError
  12377. {
  12378. public:
  12379. InvalidOpaqueCaptureAddressError( std::string const& message )
  12380. : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {}
  12381. InvalidOpaqueCaptureAddressError( char const * message )
  12382. : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {}
  12383. };
  12384. class SurfaceLostKHRError : public SystemError
  12385. {
  12386. public:
  12387. SurfaceLostKHRError( std::string const& message )
  12388. : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
  12389. SurfaceLostKHRError( char const * message )
  12390. : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
  12391. };
  12392. class NativeWindowInUseKHRError : public SystemError
  12393. {
  12394. public:
  12395. NativeWindowInUseKHRError( std::string const& message )
  12396. : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {}
  12397. NativeWindowInUseKHRError( char const * message )
  12398. : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {}
  12399. };
  12400. class OutOfDateKHRError : public SystemError
  12401. {
  12402. public:
  12403. OutOfDateKHRError( std::string const& message )
  12404. : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {}
  12405. OutOfDateKHRError( char const * message )
  12406. : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {}
  12407. };
  12408. class IncompatibleDisplayKHRError : public SystemError
  12409. {
  12410. public:
  12411. IncompatibleDisplayKHRError( std::string const& message )
  12412. : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {}
  12413. IncompatibleDisplayKHRError( char const * message )
  12414. : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {}
  12415. };
  12416. class ValidationFailedEXTError : public SystemError
  12417. {
  12418. public:
  12419. ValidationFailedEXTError( std::string const& message )
  12420. : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {}
  12421. ValidationFailedEXTError( char const * message )
  12422. : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {}
  12423. };
  12424. class InvalidShaderNVError : public SystemError
  12425. {
  12426. public:
  12427. InvalidShaderNVError( std::string const& message )
  12428. : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {}
  12429. InvalidShaderNVError( char const * message )
  12430. : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {}
  12431. };
  12432. class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError
  12433. {
  12434. public:
  12435. InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const& message )
  12436. : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {}
  12437. InvalidDrmFormatModifierPlaneLayoutEXTError( char const * message )
  12438. : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {}
  12439. };
  12440. class NotPermittedEXTError : public SystemError
  12441. {
  12442. public:
  12443. NotPermittedEXTError( std::string const& message )
  12444. : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {}
  12445. NotPermittedEXTError( char const * message )
  12446. : SystemError( make_error_code( Result::eErrorNotPermittedEXT ), message ) {}
  12447. };
  12448. #ifdef VK_USE_PLATFORM_WIN32_KHR
  12449. class FullScreenExclusiveModeLostEXTError : public SystemError
  12450. {
  12451. public:
  12452. FullScreenExclusiveModeLostEXTError( std::string const& message )
  12453. : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {}
  12454. FullScreenExclusiveModeLostEXTError( char const * message )
  12455. : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {}
  12456. };
  12457. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  12458. [[noreturn]] static void throwResultException( Result result, char const * message )
  12459. {
  12460. switch ( result )
  12461. {
  12462. case Result::eErrorOutOfHostMemory: throw OutOfHostMemoryError( message );
  12463. case Result::eErrorOutOfDeviceMemory: throw OutOfDeviceMemoryError( message );
  12464. case Result::eErrorInitializationFailed: throw InitializationFailedError( message );
  12465. case Result::eErrorDeviceLost: throw DeviceLostError( message );
  12466. case Result::eErrorMemoryMapFailed: throw MemoryMapFailedError( message );
  12467. case Result::eErrorLayerNotPresent: throw LayerNotPresentError( message );
  12468. case Result::eErrorExtensionNotPresent: throw ExtensionNotPresentError( message );
  12469. case Result::eErrorFeatureNotPresent: throw FeatureNotPresentError( message );
  12470. case Result::eErrorIncompatibleDriver: throw IncompatibleDriverError( message );
  12471. case Result::eErrorTooManyObjects: throw TooManyObjectsError( message );
  12472. case Result::eErrorFormatNotSupported: throw FormatNotSupportedError( message );
  12473. case Result::eErrorFragmentedPool: throw FragmentedPoolError( message );
  12474. case Result::eErrorUnknown: throw UnknownError( message );
  12475. case Result::eErrorOutOfPoolMemory: throw OutOfPoolMemoryError( message );
  12476. case Result::eErrorInvalidExternalHandle: throw InvalidExternalHandleError( message );
  12477. case Result::eErrorFragmentation: throw FragmentationError( message );
  12478. case Result::eErrorInvalidOpaqueCaptureAddress: throw InvalidOpaqueCaptureAddressError( message );
  12479. case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError( message );
  12480. case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError( message );
  12481. case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError( message );
  12482. case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError( message );
  12483. case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message );
  12484. case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message );
  12485. case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError( message );
  12486. case Result::eErrorNotPermittedEXT: throw NotPermittedEXTError( message );
  12487. #ifdef VK_USE_PLATFORM_WIN32_KHR
  12488. case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message );
  12489. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  12490. default: throw SystemError( make_error_code( result ) );
  12491. }
  12492. }
  12493. #endif
  12494. template <typename T> void ignore(T const&) VULKAN_HPP_NOEXCEPT {}
  12495. template <typename T>
  12496. struct ResultValue
  12497. {
  12498. #ifdef VULKAN_HPP_HAS_NOEXCEPT
  12499. ResultValue( Result r, T & v ) VULKAN_HPP_NOEXCEPT(VULKAN_HPP_NOEXCEPT(T(v)))
  12500. #else
  12501. ResultValue( Result r, T & v )
  12502. #endif
  12503. : result( r )
  12504. , value( v )
  12505. {}
  12506. #ifdef VULKAN_HPP_HAS_NOEXCEPT
  12507. ResultValue( Result r, T && v ) VULKAN_HPP_NOEXCEPT(VULKAN_HPP_NOEXCEPT(T(std::move(v))))
  12508. #else
  12509. ResultValue( Result r, T && v )
  12510. #endif
  12511. : result( r )
  12512. , value( std::move( v ) )
  12513. {}
  12514. Result result;
  12515. T value;
  12516. operator std::tuple<Result&, T&>() VULKAN_HPP_NOEXCEPT { return std::tuple<Result&, T&>(result, value); }
  12517. #if !defined(VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST)
  12518. VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.")
  12519. operator T const& () const & VULKAN_HPP_NOEXCEPT
  12520. {
  12521. return value;
  12522. }
  12523. VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.")
  12524. operator T& () & VULKAN_HPP_NOEXCEPT
  12525. {
  12526. return value;
  12527. }
  12528. VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.")
  12529. operator T const&& () const && VULKAN_HPP_NOEXCEPT
  12530. {
  12531. return std::move( value );
  12532. }
  12533. VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.")
  12534. operator T&& () && VULKAN_HPP_NOEXCEPT
  12535. {
  12536. return std::move( value );
  12537. }
  12538. #endif
  12539. };
  12540. #if !defined(VULKAN_HPP_NO_SMART_HANDLE)
  12541. template <typename Type, typename Dispatch>
  12542. struct ResultValue<UniqueHandle<Type,Dispatch>>
  12543. {
  12544. #ifdef VULKAN_HPP_HAS_NOEXCEPT
  12545. ResultValue(Result r, UniqueHandle<Type, Dispatch> && v) VULKAN_HPP_NOEXCEPT
  12546. #else
  12547. ResultValue(Result r, UniqueHandle<Type, Dispatch> && v)
  12548. #endif
  12549. : result(r)
  12550. , value(std::move(v))
  12551. {}
  12552. std::tuple<Result, UniqueHandle<Type, Dispatch>> asTuple()
  12553. {
  12554. return std::make_tuple( result, std::move( value ) );
  12555. }
  12556. # if !defined(VULKAN_HPP_DISABLE_IMPLICIT_RESULT_VALUE_CAST)
  12557. VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.")
  12558. operator UniqueHandle<Type, Dispatch>& () & VULKAN_HPP_NOEXCEPT
  12559. {
  12560. return value;
  12561. }
  12562. VULKAN_HPP_DEPRECATED("Implicit-cast operators on vk::ResultValue are deprecated. Explicitly access the value as member of ResultValue.")
  12563. operator UniqueHandle<Type, Dispatch>() VULKAN_HPP_NOEXCEPT
  12564. {
  12565. return std::move(value);
  12566. }
  12567. # endif
  12568. Result result;
  12569. UniqueHandle<Type, Dispatch> value;
  12570. };
  12571. template <typename Type, typename Dispatch>
  12572. struct ResultValue<std::vector<UniqueHandle<Type, Dispatch>>>
  12573. {
  12574. # ifdef VULKAN_HPP_HAS_NOEXCEPT
  12575. ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v ) VULKAN_HPP_NOEXCEPT
  12576. # else
  12577. ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v )
  12578. # endif
  12579. : result( r )
  12580. , value( std::move( v ) )
  12581. {}
  12582. Result result;
  12583. std::vector<UniqueHandle<Type, Dispatch>> value;
  12584. operator std::tuple<Result &, std::vector<UniqueHandle<Type, Dispatch>> &>() VULKAN_HPP_NOEXCEPT
  12585. {
  12586. return std::tuple<Result &, std::vector<UniqueHandle<Type, Dispatch>> &>( result, value );
  12587. }
  12588. };
  12589. #endif
  12590. template <typename T>
  12591. struct ResultValueType
  12592. {
  12593. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  12594. typedef ResultValue<T> type;
  12595. #else
  12596. typedef T type;
  12597. #endif
  12598. };
  12599. template <>
  12600. struct ResultValueType<void>
  12601. {
  12602. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  12603. typedef Result type;
  12604. #else
  12605. typedef void type;
  12606. #endif
  12607. };
  12608. VULKAN_HPP_INLINE ResultValueType<void>::type createResultValue( Result result, char const * message )
  12609. {
  12610. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  12611. ignore(message);
  12612. VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
  12613. return result;
  12614. #else
  12615. if ( result != Result::eSuccess )
  12616. {
  12617. throwResultException( result, message );
  12618. }
  12619. #endif
  12620. }
  12621. template <typename T>
  12622. VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValue( Result result, T & data, char const * message )
  12623. {
  12624. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  12625. ignore(message);
  12626. VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
  12627. return ResultValue<T>( result, std::move( data ) );
  12628. #else
  12629. if ( result != Result::eSuccess )
  12630. {
  12631. throwResultException( result, message );
  12632. }
  12633. return std::move( data );
  12634. #endif
  12635. }
  12636. VULKAN_HPP_INLINE Result createResultValue( Result result, char const * message, std::initializer_list<Result> successCodes )
  12637. {
  12638. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  12639. ignore(message);
  12640. ignore(successCodes); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
  12641. VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
  12642. #else
  12643. if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
  12644. {
  12645. throwResultException( result, message );
  12646. }
  12647. #endif
  12648. return result;
  12649. }
  12650. template <typename T>
  12651. VULKAN_HPP_INLINE ResultValue<T> createResultValue( Result result, T & data, char const * message, std::initializer_list<Result> successCodes )
  12652. {
  12653. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  12654. ignore(message);
  12655. ignore(successCodes); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
  12656. VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
  12657. #else
  12658. if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
  12659. {
  12660. throwResultException( result, message );
  12661. }
  12662. #endif
  12663. return ResultValue<T>( result, data );
  12664. }
  12665. #ifndef VULKAN_HPP_NO_SMART_HANDLE
  12666. template <typename T, typename D>
  12667. VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<T,D>>::type createResultValue( Result result, T & data, char const * message, typename UniqueHandleTraits<T,D>::deleter const& deleter )
  12668. {
  12669. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  12670. ignore(message);
  12671. VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
  12672. return ResultValue<UniqueHandle<T,D>>( result, UniqueHandle<T,D>(data, deleter) );
  12673. #else
  12674. if ( result != Result::eSuccess )
  12675. {
  12676. throwResultException( result, message );
  12677. }
  12678. return UniqueHandle<T,D>(data, deleter);
  12679. #endif
  12680. }
  12681. template <typename T, typename D>
  12682. VULKAN_HPP_INLINE ResultValue<UniqueHandle<T, D>>
  12683. createResultValue( Result result,
  12684. T & data,
  12685. char const * message,
  12686. std::initializer_list<Result> successCodes,
  12687. typename UniqueHandleTraits<T, D>::deleter const & deleter )
  12688. {
  12689. # ifdef VULKAN_HPP_NO_EXCEPTIONS
  12690. ignore( message );
  12691. ignore(successCodes); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
  12692. VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
  12693. # else
  12694. if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
  12695. {
  12696. throwResultException( result, message );
  12697. }
  12698. # endif
  12699. return ResultValue<UniqueHandle<T, D>>( result, UniqueHandle<T, D>( data, deleter ) );
  12700. }
  12701. template <typename T, typename D>
  12702. VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<T, D>>>::type
  12703. createResultValue( Result result, std::vector<UniqueHandle<T, D>> && data, char const * message )
  12704. {
  12705. # ifdef VULKAN_HPP_NO_EXCEPTIONS
  12706. ignore( message );
  12707. VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
  12708. return ResultValue<std::vector<UniqueHandle<T, D>>>( result, std::move( data ) );
  12709. # else
  12710. if ( result != Result::eSuccess )
  12711. {
  12712. throwResultException( result, message );
  12713. }
  12714. return std::move( data );
  12715. # endif
  12716. }
  12717. template <typename T, typename D>
  12718. VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<T, D>>>
  12719. createResultValue( Result result,
  12720. std::vector<UniqueHandle<T, D>> && data,
  12721. char const * message,
  12722. std::initializer_list<Result> successCodes )
  12723. {
  12724. # ifdef VULKAN_HPP_NO_EXCEPTIONS
  12725. ignore( message );
  12726. ignore(successCodes); // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
  12727. VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
  12728. # else
  12729. if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
  12730. {
  12731. throwResultException( result, message );
  12732. }
  12733. # endif
  12734. return ResultValue<std::vector<UniqueHandle<T, D>>>( result, std::move( data ) );
  12735. }
  12736. #endif
  12737. struct AabbPositionsKHR
  12738. {
  12739. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  12740. VULKAN_HPP_CONSTEXPR AabbPositionsKHR(float minX_ = {}, float minY_ = {}, float minZ_ = {}, float maxX_ = {}, float maxY_ = {}, float maxZ_ = {}) VULKAN_HPP_NOEXCEPT
  12741. : minX( minX_ ), minY( minY_ ), minZ( minZ_ ), maxX( maxX_ ), maxY( maxY_ ), maxZ( maxZ_ )
  12742. {}
  12743. VULKAN_HPP_CONSTEXPR AabbPositionsKHR( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  12744. AabbPositionsKHR( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  12745. : AabbPositionsKHR( *reinterpret_cast<AabbPositionsKHR const *>( &rhs ) )
  12746. {}
  12747. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  12748. VULKAN_HPP_CONSTEXPR_14 AabbPositionsKHR & operator=( AabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  12749. AabbPositionsKHR & operator=( VkAabbPositionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  12750. {
  12751. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AabbPositionsKHR const *>( &rhs );
  12752. return *this;
  12753. }
  12754. AabbPositionsKHR & setMinX( float minX_ ) VULKAN_HPP_NOEXCEPT
  12755. {
  12756. minX = minX_;
  12757. return *this;
  12758. }
  12759. AabbPositionsKHR & setMinY( float minY_ ) VULKAN_HPP_NOEXCEPT
  12760. {
  12761. minY = minY_;
  12762. return *this;
  12763. }
  12764. AabbPositionsKHR & setMinZ( float minZ_ ) VULKAN_HPP_NOEXCEPT
  12765. {
  12766. minZ = minZ_;
  12767. return *this;
  12768. }
  12769. AabbPositionsKHR & setMaxX( float maxX_ ) VULKAN_HPP_NOEXCEPT
  12770. {
  12771. maxX = maxX_;
  12772. return *this;
  12773. }
  12774. AabbPositionsKHR & setMaxY( float maxY_ ) VULKAN_HPP_NOEXCEPT
  12775. {
  12776. maxY = maxY_;
  12777. return *this;
  12778. }
  12779. AabbPositionsKHR & setMaxZ( float maxZ_ ) VULKAN_HPP_NOEXCEPT
  12780. {
  12781. maxZ = maxZ_;
  12782. return *this;
  12783. }
  12784. operator VkAabbPositionsKHR const&() const VULKAN_HPP_NOEXCEPT
  12785. {
  12786. return *reinterpret_cast<const VkAabbPositionsKHR*>( this );
  12787. }
  12788. operator VkAabbPositionsKHR &() VULKAN_HPP_NOEXCEPT
  12789. {
  12790. return *reinterpret_cast<VkAabbPositionsKHR*>( this );
  12791. }
  12792. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  12793. auto operator<=>( AabbPositionsKHR const& ) const = default;
  12794. #else
  12795. bool operator==( AabbPositionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  12796. {
  12797. return ( minX == rhs.minX )
  12798. && ( minY == rhs.minY )
  12799. && ( minZ == rhs.minZ )
  12800. && ( maxX == rhs.maxX )
  12801. && ( maxY == rhs.maxY )
  12802. && ( maxZ == rhs.maxZ );
  12803. }
  12804. bool operator!=( AabbPositionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  12805. {
  12806. return !operator==( rhs );
  12807. }
  12808. #endif
  12809. public:
  12810. float minX = {};
  12811. float minY = {};
  12812. float minZ = {};
  12813. float maxX = {};
  12814. float maxY = {};
  12815. float maxZ = {};
  12816. };
  12817. static_assert( sizeof( AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ), "struct and wrapper have different size!" );
  12818. static_assert( std::is_standard_layout<AabbPositionsKHR>::value, "struct wrapper is not a standard layout!" );
  12819. using AabbPositionsNV = AabbPositionsKHR;
  12820. class AccelerationStructureKHR
  12821. {
  12822. public:
  12823. using CType = VkAccelerationStructureKHR;
  12824. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR;
  12825. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR;
  12826. public:
  12827. VULKAN_HPP_CONSTEXPR AccelerationStructureKHR() VULKAN_HPP_NOEXCEPT
  12828. : m_accelerationStructureKHR(VK_NULL_HANDLE)
  12829. {}
  12830. VULKAN_HPP_CONSTEXPR AccelerationStructureKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  12831. : m_accelerationStructureKHR(VK_NULL_HANDLE)
  12832. {}
  12833. VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureKHR( VkAccelerationStructureKHR accelerationStructureKHR ) VULKAN_HPP_NOEXCEPT
  12834. : m_accelerationStructureKHR( accelerationStructureKHR )
  12835. {}
  12836. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  12837. AccelerationStructureKHR & operator=(VkAccelerationStructureKHR accelerationStructureKHR) VULKAN_HPP_NOEXCEPT
  12838. {
  12839. m_accelerationStructureKHR = accelerationStructureKHR;
  12840. return *this;
  12841. }
  12842. #endif
  12843. AccelerationStructureKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  12844. {
  12845. m_accelerationStructureKHR = VK_NULL_HANDLE;
  12846. return *this;
  12847. }
  12848. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  12849. auto operator<=>( AccelerationStructureKHR const& ) const = default;
  12850. #else
  12851. bool operator==( AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
  12852. {
  12853. return m_accelerationStructureKHR == rhs.m_accelerationStructureKHR;
  12854. }
  12855. bool operator!=(AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
  12856. {
  12857. return m_accelerationStructureKHR != rhs.m_accelerationStructureKHR;
  12858. }
  12859. bool operator<(AccelerationStructureKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
  12860. {
  12861. return m_accelerationStructureKHR < rhs.m_accelerationStructureKHR;
  12862. }
  12863. #endif
  12864. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureKHR() const VULKAN_HPP_NOEXCEPT
  12865. {
  12866. return m_accelerationStructureKHR;
  12867. }
  12868. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  12869. {
  12870. return m_accelerationStructureKHR != VK_NULL_HANDLE;
  12871. }
  12872. bool operator!() const VULKAN_HPP_NOEXCEPT
  12873. {
  12874. return m_accelerationStructureKHR == VK_NULL_HANDLE;
  12875. }
  12876. private:
  12877. VkAccelerationStructureKHR m_accelerationStructureKHR;
  12878. };
  12879. static_assert( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR ) == sizeof( VkAccelerationStructureKHR ), "handle and wrapper have different size!" );
  12880. template <>
  12881. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eAccelerationStructureKHR>
  12882. {
  12883. using type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
  12884. };
  12885. template <>
  12886. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureKHR>
  12887. {
  12888. using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
  12889. };
  12890. template <>
  12891. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureKHR>
  12892. {
  12893. using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
  12894. };
  12895. template <>
  12896. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>
  12897. {
  12898. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  12899. };
  12900. union DeviceOrHostAddressConstKHR
  12901. {
  12902. DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const& rhs ) VULKAN_HPP_NOEXCEPT
  12903. {
  12904. memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) );
  12905. }
  12906. DeviceOrHostAddressConstKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
  12907. : deviceAddress( deviceAddress_ )
  12908. {}
  12909. DeviceOrHostAddressConstKHR( const void* hostAddress_ )
  12910. : hostAddress( hostAddress_ )
  12911. {}
  12912. DeviceOrHostAddressConstKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
  12913. {
  12914. deviceAddress = deviceAddress_;
  12915. return *this;
  12916. }
  12917. DeviceOrHostAddressConstKHR & setHostAddress( const void* hostAddress_ ) VULKAN_HPP_NOEXCEPT
  12918. {
  12919. hostAddress = hostAddress_;
  12920. return *this;
  12921. }
  12922. VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  12923. {
  12924. memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) );
  12925. return *this;
  12926. }
  12927. operator VkDeviceOrHostAddressConstKHR const&() const
  12928. {
  12929. return *reinterpret_cast<const VkDeviceOrHostAddressConstKHR*>(this);
  12930. }
  12931. operator VkDeviceOrHostAddressConstKHR &()
  12932. {
  12933. return *reinterpret_cast<VkDeviceOrHostAddressConstKHR*>(this);
  12934. }
  12935. #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
  12936. VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
  12937. const void* hostAddress;
  12938. #else
  12939. VkDeviceAddress deviceAddress;
  12940. const void* hostAddress;
  12941. #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  12942. };
  12943. struct AccelerationStructureGeometryTrianglesDataKHR
  12944. {
  12945. static const bool allowDuplicate = false;
  12946. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
  12947. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  12948. AccelerationStructureGeometryTrianglesDataKHR(VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, uint32_t maxVertex_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData_ = {}) VULKAN_HPP_NOEXCEPT
  12949. : vertexFormat( vertexFormat_ ), vertexData( vertexData_ ), vertexStride( vertexStride_ ), maxVertex( maxVertex_ ), indexType( indexType_ ), indexData( indexData_ ), transformData( transformData_ )
  12950. {}
  12951. AccelerationStructureGeometryTrianglesDataKHR( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  12952. AccelerationStructureGeometryTrianglesDataKHR( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  12953. : AccelerationStructureGeometryTrianglesDataKHR( *reinterpret_cast<AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs ) )
  12954. {}
  12955. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  12956. AccelerationStructureGeometryTrianglesDataKHR & operator=( AccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  12957. AccelerationStructureGeometryTrianglesDataKHR & operator=( VkAccelerationStructureGeometryTrianglesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  12958. {
  12959. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const *>( &rhs );
  12960. return *this;
  12961. }
  12962. AccelerationStructureGeometryTrianglesDataKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  12963. {
  12964. pNext = pNext_;
  12965. return *this;
  12966. }
  12967. AccelerationStructureGeometryTrianglesDataKHR & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
  12968. {
  12969. vertexFormat = vertexFormat_;
  12970. return *this;
  12971. }
  12972. AccelerationStructureGeometryTrianglesDataKHR & setVertexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & vertexData_ ) VULKAN_HPP_NOEXCEPT
  12973. {
  12974. vertexData = vertexData_;
  12975. return *this;
  12976. }
  12977. AccelerationStructureGeometryTrianglesDataKHR & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
  12978. {
  12979. vertexStride = vertexStride_;
  12980. return *this;
  12981. }
  12982. AccelerationStructureGeometryTrianglesDataKHR & setMaxVertex( uint32_t maxVertex_ ) VULKAN_HPP_NOEXCEPT
  12983. {
  12984. maxVertex = maxVertex_;
  12985. return *this;
  12986. }
  12987. AccelerationStructureGeometryTrianglesDataKHR & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
  12988. {
  12989. indexType = indexType_;
  12990. return *this;
  12991. }
  12992. AccelerationStructureGeometryTrianglesDataKHR & setIndexData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & indexData_ ) VULKAN_HPP_NOEXCEPT
  12993. {
  12994. indexData = indexData_;
  12995. return *this;
  12996. }
  12997. AccelerationStructureGeometryTrianglesDataKHR & setTransformData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & transformData_ ) VULKAN_HPP_NOEXCEPT
  12998. {
  12999. transformData = transformData_;
  13000. return *this;
  13001. }
  13002. operator VkAccelerationStructureGeometryTrianglesDataKHR const&() const VULKAN_HPP_NOEXCEPT
  13003. {
  13004. return *reinterpret_cast<const VkAccelerationStructureGeometryTrianglesDataKHR*>( this );
  13005. }
  13006. operator VkAccelerationStructureGeometryTrianglesDataKHR &() VULKAN_HPP_NOEXCEPT
  13007. {
  13008. return *reinterpret_cast<VkAccelerationStructureGeometryTrianglesDataKHR*>( this );
  13009. }
  13010. public:
  13011. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryTrianglesDataKHR;
  13012. const void* pNext = {};
  13013. VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  13014. VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR vertexData = {};
  13015. VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {};
  13016. uint32_t maxVertex = {};
  13017. VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
  13018. VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR indexData = {};
  13019. VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR transformData = {};
  13020. };
  13021. static_assert( sizeof( AccelerationStructureGeometryTrianglesDataKHR ) == sizeof( VkAccelerationStructureGeometryTrianglesDataKHR ), "struct and wrapper have different size!" );
  13022. static_assert( std::is_standard_layout<AccelerationStructureGeometryTrianglesDataKHR>::value, "struct wrapper is not a standard layout!" );
  13023. template <>
  13024. struct CppType<StructureType, StructureType::eAccelerationStructureGeometryTrianglesDataKHR>
  13025. {
  13026. using Type = AccelerationStructureGeometryTrianglesDataKHR;
  13027. };
  13028. struct AccelerationStructureGeometryAabbsDataKHR
  13029. {
  13030. static const bool allowDuplicate = false;
  13031. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryAabbsDataKHR;
  13032. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13033. AccelerationStructureGeometryAabbsDataKHR(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}) VULKAN_HPP_NOEXCEPT
  13034. : data( data_ ), stride( stride_ )
  13035. {}
  13036. AccelerationStructureGeometryAabbsDataKHR( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13037. AccelerationStructureGeometryAabbsDataKHR( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  13038. : AccelerationStructureGeometryAabbsDataKHR( *reinterpret_cast<AccelerationStructureGeometryAabbsDataKHR const *>( &rhs ) )
  13039. {}
  13040. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13041. AccelerationStructureGeometryAabbsDataKHR & operator=( AccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13042. AccelerationStructureGeometryAabbsDataKHR & operator=( VkAccelerationStructureGeometryAabbsDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  13043. {
  13044. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const *>( &rhs );
  13045. return *this;
  13046. }
  13047. AccelerationStructureGeometryAabbsDataKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  13048. {
  13049. pNext = pNext_;
  13050. return *this;
  13051. }
  13052. AccelerationStructureGeometryAabbsDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
  13053. {
  13054. data = data_;
  13055. return *this;
  13056. }
  13057. AccelerationStructureGeometryAabbsDataKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
  13058. {
  13059. stride = stride_;
  13060. return *this;
  13061. }
  13062. operator VkAccelerationStructureGeometryAabbsDataKHR const&() const VULKAN_HPP_NOEXCEPT
  13063. {
  13064. return *reinterpret_cast<const VkAccelerationStructureGeometryAabbsDataKHR*>( this );
  13065. }
  13066. operator VkAccelerationStructureGeometryAabbsDataKHR &() VULKAN_HPP_NOEXCEPT
  13067. {
  13068. return *reinterpret_cast<VkAccelerationStructureGeometryAabbsDataKHR*>( this );
  13069. }
  13070. public:
  13071. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryAabbsDataKHR;
  13072. const void* pNext = {};
  13073. VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
  13074. VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
  13075. };
  13076. static_assert( sizeof( AccelerationStructureGeometryAabbsDataKHR ) == sizeof( VkAccelerationStructureGeometryAabbsDataKHR ), "struct and wrapper have different size!" );
  13077. static_assert( std::is_standard_layout<AccelerationStructureGeometryAabbsDataKHR>::value, "struct wrapper is not a standard layout!" );
  13078. template <>
  13079. struct CppType<StructureType, StructureType::eAccelerationStructureGeometryAabbsDataKHR>
  13080. {
  13081. using Type = AccelerationStructureGeometryAabbsDataKHR;
  13082. };
  13083. struct AccelerationStructureGeometryInstancesDataKHR
  13084. {
  13085. static const bool allowDuplicate = false;
  13086. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryInstancesDataKHR;
  13087. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13088. AccelerationStructureGeometryInstancesDataKHR(VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data_ = {}) VULKAN_HPP_NOEXCEPT
  13089. : arrayOfPointers( arrayOfPointers_ ), data( data_ )
  13090. {}
  13091. AccelerationStructureGeometryInstancesDataKHR( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13092. AccelerationStructureGeometryInstancesDataKHR( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  13093. : AccelerationStructureGeometryInstancesDataKHR( *reinterpret_cast<AccelerationStructureGeometryInstancesDataKHR const *>( &rhs ) )
  13094. {}
  13095. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13096. AccelerationStructureGeometryInstancesDataKHR & operator=( AccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13097. AccelerationStructureGeometryInstancesDataKHR & operator=( VkAccelerationStructureGeometryInstancesDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  13098. {
  13099. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const *>( &rhs );
  13100. return *this;
  13101. }
  13102. AccelerationStructureGeometryInstancesDataKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  13103. {
  13104. pNext = pNext_;
  13105. return *this;
  13106. }
  13107. AccelerationStructureGeometryInstancesDataKHR & setArrayOfPointers( VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers_ ) VULKAN_HPP_NOEXCEPT
  13108. {
  13109. arrayOfPointers = arrayOfPointers_;
  13110. return *this;
  13111. }
  13112. AccelerationStructureGeometryInstancesDataKHR & setData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & data_ ) VULKAN_HPP_NOEXCEPT
  13113. {
  13114. data = data_;
  13115. return *this;
  13116. }
  13117. operator VkAccelerationStructureGeometryInstancesDataKHR const&() const VULKAN_HPP_NOEXCEPT
  13118. {
  13119. return *reinterpret_cast<const VkAccelerationStructureGeometryInstancesDataKHR*>( this );
  13120. }
  13121. operator VkAccelerationStructureGeometryInstancesDataKHR &() VULKAN_HPP_NOEXCEPT
  13122. {
  13123. return *reinterpret_cast<VkAccelerationStructureGeometryInstancesDataKHR*>( this );
  13124. }
  13125. public:
  13126. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryInstancesDataKHR;
  13127. const void* pNext = {};
  13128. VULKAN_HPP_NAMESPACE::Bool32 arrayOfPointers = {};
  13129. VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR data = {};
  13130. };
  13131. static_assert( sizeof( AccelerationStructureGeometryInstancesDataKHR ) == sizeof( VkAccelerationStructureGeometryInstancesDataKHR ), "struct and wrapper have different size!" );
  13132. static_assert( std::is_standard_layout<AccelerationStructureGeometryInstancesDataKHR>::value, "struct wrapper is not a standard layout!" );
  13133. template <>
  13134. struct CppType<StructureType, StructureType::eAccelerationStructureGeometryInstancesDataKHR>
  13135. {
  13136. using Type = AccelerationStructureGeometryInstancesDataKHR;
  13137. };
  13138. union AccelerationStructureGeometryDataKHR
  13139. {
  13140. AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const& rhs ) VULKAN_HPP_NOEXCEPT
  13141. {
  13142. memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) );
  13143. }
  13144. AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles_ = {} )
  13145. : triangles( triangles_ )
  13146. {}
  13147. AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs_ )
  13148. : aabbs( aabbs_ )
  13149. {}
  13150. AccelerationStructureGeometryDataKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances_ )
  13151. : instances( instances_ )
  13152. {}
  13153. AccelerationStructureGeometryDataKHR & setTriangles( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR const & triangles_ ) VULKAN_HPP_NOEXCEPT
  13154. {
  13155. triangles = triangles_;
  13156. return *this;
  13157. }
  13158. AccelerationStructureGeometryDataKHR & setAabbs( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR const & aabbs_ ) VULKAN_HPP_NOEXCEPT
  13159. {
  13160. aabbs = aabbs_;
  13161. return *this;
  13162. }
  13163. AccelerationStructureGeometryDataKHR & setInstances( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR const & instances_ ) VULKAN_HPP_NOEXCEPT
  13164. {
  13165. instances = instances_;
  13166. return *this;
  13167. }
  13168. VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR & operator=( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  13169. {
  13170. memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) );
  13171. return *this;
  13172. }
  13173. operator VkAccelerationStructureGeometryDataKHR const&() const
  13174. {
  13175. return *reinterpret_cast<const VkAccelerationStructureGeometryDataKHR*>(this);
  13176. }
  13177. operator VkAccelerationStructureGeometryDataKHR &()
  13178. {
  13179. return *reinterpret_cast<VkAccelerationStructureGeometryDataKHR*>(this);
  13180. }
  13181. #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
  13182. VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR triangles;
  13183. VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR aabbs;
  13184. VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR instances;
  13185. #else
  13186. VkAccelerationStructureGeometryTrianglesDataKHR triangles;
  13187. VkAccelerationStructureGeometryAabbsDataKHR aabbs;
  13188. VkAccelerationStructureGeometryInstancesDataKHR instances;
  13189. #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  13190. };
  13191. struct AccelerationStructureGeometryKHR
  13192. {
  13193. static const bool allowDuplicate = false;
  13194. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureGeometryKHR;
  13195. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13196. AccelerationStructureGeometryKHR(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry_ = {}, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}) VULKAN_HPP_NOEXCEPT
  13197. : geometryType( geometryType_ ), geometry( geometry_ ), flags( flags_ )
  13198. {}
  13199. AccelerationStructureGeometryKHR( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13200. AccelerationStructureGeometryKHR( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  13201. : AccelerationStructureGeometryKHR( *reinterpret_cast<AccelerationStructureGeometryKHR const *>( &rhs ) )
  13202. {}
  13203. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13204. AccelerationStructureGeometryKHR & operator=( AccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13205. AccelerationStructureGeometryKHR & operator=( VkAccelerationStructureGeometryKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  13206. {
  13207. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR const *>( &rhs );
  13208. return *this;
  13209. }
  13210. AccelerationStructureGeometryKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  13211. {
  13212. pNext = pNext_;
  13213. return *this;
  13214. }
  13215. AccelerationStructureGeometryKHR & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
  13216. {
  13217. geometryType = geometryType_;
  13218. return *this;
  13219. }
  13220. AccelerationStructureGeometryKHR & setGeometry( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR const & geometry_ ) VULKAN_HPP_NOEXCEPT
  13221. {
  13222. geometry = geometry_;
  13223. return *this;
  13224. }
  13225. AccelerationStructureGeometryKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
  13226. {
  13227. flags = flags_;
  13228. return *this;
  13229. }
  13230. operator VkAccelerationStructureGeometryKHR const&() const VULKAN_HPP_NOEXCEPT
  13231. {
  13232. return *reinterpret_cast<const VkAccelerationStructureGeometryKHR*>( this );
  13233. }
  13234. operator VkAccelerationStructureGeometryKHR &() VULKAN_HPP_NOEXCEPT
  13235. {
  13236. return *reinterpret_cast<VkAccelerationStructureGeometryKHR*>( this );
  13237. }
  13238. public:
  13239. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureGeometryKHR;
  13240. const void* pNext = {};
  13241. VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
  13242. VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR geometry = {};
  13243. VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {};
  13244. };
  13245. static_assert( sizeof( AccelerationStructureGeometryKHR ) == sizeof( VkAccelerationStructureGeometryKHR ), "struct and wrapper have different size!" );
  13246. static_assert( std::is_standard_layout<AccelerationStructureGeometryKHR>::value, "struct wrapper is not a standard layout!" );
  13247. template <>
  13248. struct CppType<StructureType, StructureType::eAccelerationStructureGeometryKHR>
  13249. {
  13250. using Type = AccelerationStructureGeometryKHR;
  13251. };
  13252. union DeviceOrHostAddressKHR
  13253. {
  13254. DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const& rhs ) VULKAN_HPP_NOEXCEPT
  13255. {
  13256. memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) );
  13257. }
  13258. DeviceOrHostAddressKHR( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {} )
  13259. : deviceAddress( deviceAddress_ )
  13260. {}
  13261. DeviceOrHostAddressKHR( void* hostAddress_ )
  13262. : hostAddress( hostAddress_ )
  13263. {}
  13264. DeviceOrHostAddressKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
  13265. {
  13266. deviceAddress = deviceAddress_;
  13267. return *this;
  13268. }
  13269. DeviceOrHostAddressKHR & setHostAddress( void* hostAddress_ ) VULKAN_HPP_NOEXCEPT
  13270. {
  13271. hostAddress = hostAddress_;
  13272. return *this;
  13273. }
  13274. VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR & operator=( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  13275. {
  13276. memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) );
  13277. return *this;
  13278. }
  13279. operator VkDeviceOrHostAddressKHR const&() const
  13280. {
  13281. return *reinterpret_cast<const VkDeviceOrHostAddressKHR*>(this);
  13282. }
  13283. operator VkDeviceOrHostAddressKHR &()
  13284. {
  13285. return *reinterpret_cast<VkDeviceOrHostAddressKHR*>(this);
  13286. }
  13287. #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
  13288. VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress;
  13289. void* hostAddress;
  13290. #else
  13291. VkDeviceAddress deviceAddress;
  13292. void* hostAddress;
  13293. #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  13294. };
  13295. struct AccelerationStructureBuildGeometryInfoKHR
  13296. {
  13297. static const bool allowDuplicate = false;
  13298. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildGeometryInfoKHR;
  13299. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13300. AccelerationStructureBuildGeometryInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ = {}, uint32_t geometryCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* pGeometries_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const * ppGeometries_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {}) VULKAN_HPP_NOEXCEPT
  13301. : type( type_ ), flags( flags_ ), mode( mode_ ), srcAccelerationStructure( srcAccelerationStructure_ ), dstAccelerationStructure( dstAccelerationStructure_ ), geometryCount( geometryCount_ ), pGeometries( pGeometries_ ), ppGeometries( ppGeometries_ ), scratchData( scratchData_ )
  13302. {}
  13303. AccelerationStructureBuildGeometryInfoKHR( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13304. AccelerationStructureBuildGeometryInfoKHR( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  13305. : AccelerationStructureBuildGeometryInfoKHR( *reinterpret_cast<AccelerationStructureBuildGeometryInfoKHR const *>( &rhs ) )
  13306. {}
  13307. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  13308. AccelerationStructureBuildGeometryInfoKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const > const & pGeometries_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData_ = {} )
  13309. : type( type_ ), flags( flags_ ), mode( mode_ ), srcAccelerationStructure( srcAccelerationStructure_ ), dstAccelerationStructure( dstAccelerationStructure_ ), geometryCount( static_cast<uint32_t>( !geometries_.empty() ? geometries_.size() : pGeometries_.size() ) ), pGeometries( geometries_.data() ), ppGeometries( pGeometries_.data() ), scratchData( scratchData_ )
  13310. {
  13311. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  13312. VULKAN_HPP_ASSERT( ( !geometries_.empty() + !pGeometries_.empty() ) == 1 );
  13313. #else
  13314. if ( ( !geometries_.empty() + !pGeometries_.empty() ) != 1 )
  13315. {
  13316. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::AccelerationStructureBuildGeometryInfoKHR::AccelerationStructureBuildGeometryInfoKHR: ( !geometries_.empty() + !pGeometries_.empty() ) != 1" );
  13317. }
  13318. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  13319. }
  13320. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  13321. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13322. AccelerationStructureBuildGeometryInfoKHR & operator=( AccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13323. AccelerationStructureBuildGeometryInfoKHR & operator=( VkAccelerationStructureBuildGeometryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  13324. {
  13325. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR const *>( &rhs );
  13326. return *this;
  13327. }
  13328. AccelerationStructureBuildGeometryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  13329. {
  13330. pNext = pNext_;
  13331. return *this;
  13332. }
  13333. AccelerationStructureBuildGeometryInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
  13334. {
  13335. type = type_;
  13336. return *this;
  13337. }
  13338. AccelerationStructureBuildGeometryInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
  13339. {
  13340. flags = flags_;
  13341. return *this;
  13342. }
  13343. AccelerationStructureBuildGeometryInfoKHR & setMode( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
  13344. {
  13345. mode = mode_;
  13346. return *this;
  13347. }
  13348. AccelerationStructureBuildGeometryInfoKHR & setSrcAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
  13349. {
  13350. srcAccelerationStructure = srcAccelerationStructure_;
  13351. return *this;
  13352. }
  13353. AccelerationStructureBuildGeometryInfoKHR & setDstAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure_ ) VULKAN_HPP_NOEXCEPT
  13354. {
  13355. dstAccelerationStructure = dstAccelerationStructure_;
  13356. return *this;
  13357. }
  13358. AccelerationStructureBuildGeometryInfoKHR & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
  13359. {
  13360. geometryCount = geometryCount_;
  13361. return *this;
  13362. }
  13363. AccelerationStructureBuildGeometryInfoKHR & setPGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* pGeometries_ ) VULKAN_HPP_NOEXCEPT
  13364. {
  13365. pGeometries = pGeometries_;
  13366. return *this;
  13367. }
  13368. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  13369. AccelerationStructureBuildGeometryInfoKHR & setGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR> const & geometries_ ) VULKAN_HPP_NOEXCEPT
  13370. {
  13371. geometryCount = static_cast<uint32_t>( geometries_.size() );
  13372. pGeometries = geometries_.data();
  13373. return *this;
  13374. }
  13375. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  13376. AccelerationStructureBuildGeometryInfoKHR & setPpGeometries( const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const * ppGeometries_ ) VULKAN_HPP_NOEXCEPT
  13377. {
  13378. ppGeometries = ppGeometries_;
  13379. return *this;
  13380. }
  13381. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  13382. AccelerationStructureBuildGeometryInfoKHR & setPGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const > const & pGeometries_ ) VULKAN_HPP_NOEXCEPT
  13383. {
  13384. geometryCount = static_cast<uint32_t>( pGeometries_.size() );
  13385. ppGeometries = pGeometries_.data();
  13386. return *this;
  13387. }
  13388. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  13389. AccelerationStructureBuildGeometryInfoKHR & setScratchData( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & scratchData_ ) VULKAN_HPP_NOEXCEPT
  13390. {
  13391. scratchData = scratchData_;
  13392. return *this;
  13393. }
  13394. operator VkAccelerationStructureBuildGeometryInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  13395. {
  13396. return *reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR*>( this );
  13397. }
  13398. operator VkAccelerationStructureBuildGeometryInfoKHR &() VULKAN_HPP_NOEXCEPT
  13399. {
  13400. return *reinterpret_cast<VkAccelerationStructureBuildGeometryInfoKHR*>( this );
  13401. }
  13402. public:
  13403. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildGeometryInfoKHR;
  13404. const void* pNext = {};
  13405. VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
  13406. VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR flags = {};
  13407. VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR::eBuild;
  13408. VULKAN_HPP_NAMESPACE::AccelerationStructureKHR srcAccelerationStructure = {};
  13409. VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dstAccelerationStructure = {};
  13410. uint32_t geometryCount = {};
  13411. const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* pGeometries = {};
  13412. const VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR* const * ppGeometries = {};
  13413. VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR scratchData = {};
  13414. };
  13415. static_assert( sizeof( AccelerationStructureBuildGeometryInfoKHR ) == sizeof( VkAccelerationStructureBuildGeometryInfoKHR ), "struct and wrapper have different size!" );
  13416. static_assert( std::is_standard_layout<AccelerationStructureBuildGeometryInfoKHR>::value, "struct wrapper is not a standard layout!" );
  13417. template <>
  13418. struct CppType<StructureType, StructureType::eAccelerationStructureBuildGeometryInfoKHR>
  13419. {
  13420. using Type = AccelerationStructureBuildGeometryInfoKHR;
  13421. };
  13422. struct AccelerationStructureBuildRangeInfoKHR
  13423. {
  13424. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13425. VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR(uint32_t primitiveCount_ = {}, uint32_t primitiveOffset_ = {}, uint32_t firstVertex_ = {}, uint32_t transformOffset_ = {}) VULKAN_HPP_NOEXCEPT
  13426. : primitiveCount( primitiveCount_ ), primitiveOffset( primitiveOffset_ ), firstVertex( firstVertex_ ), transformOffset( transformOffset_ )
  13427. {}
  13428. VULKAN_HPP_CONSTEXPR AccelerationStructureBuildRangeInfoKHR( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13429. AccelerationStructureBuildRangeInfoKHR( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  13430. : AccelerationStructureBuildRangeInfoKHR( *reinterpret_cast<AccelerationStructureBuildRangeInfoKHR const *>( &rhs ) )
  13431. {}
  13432. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13433. VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildRangeInfoKHR & operator=( AccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13434. AccelerationStructureBuildRangeInfoKHR & operator=( VkAccelerationStructureBuildRangeInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  13435. {
  13436. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR const *>( &rhs );
  13437. return *this;
  13438. }
  13439. AccelerationStructureBuildRangeInfoKHR & setPrimitiveCount( uint32_t primitiveCount_ ) VULKAN_HPP_NOEXCEPT
  13440. {
  13441. primitiveCount = primitiveCount_;
  13442. return *this;
  13443. }
  13444. AccelerationStructureBuildRangeInfoKHR & setPrimitiveOffset( uint32_t primitiveOffset_ ) VULKAN_HPP_NOEXCEPT
  13445. {
  13446. primitiveOffset = primitiveOffset_;
  13447. return *this;
  13448. }
  13449. AccelerationStructureBuildRangeInfoKHR & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
  13450. {
  13451. firstVertex = firstVertex_;
  13452. return *this;
  13453. }
  13454. AccelerationStructureBuildRangeInfoKHR & setTransformOffset( uint32_t transformOffset_ ) VULKAN_HPP_NOEXCEPT
  13455. {
  13456. transformOffset = transformOffset_;
  13457. return *this;
  13458. }
  13459. operator VkAccelerationStructureBuildRangeInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  13460. {
  13461. return *reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR*>( this );
  13462. }
  13463. operator VkAccelerationStructureBuildRangeInfoKHR &() VULKAN_HPP_NOEXCEPT
  13464. {
  13465. return *reinterpret_cast<VkAccelerationStructureBuildRangeInfoKHR*>( this );
  13466. }
  13467. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  13468. auto operator<=>( AccelerationStructureBuildRangeInfoKHR const& ) const = default;
  13469. #else
  13470. bool operator==( AccelerationStructureBuildRangeInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  13471. {
  13472. return ( primitiveCount == rhs.primitiveCount )
  13473. && ( primitiveOffset == rhs.primitiveOffset )
  13474. && ( firstVertex == rhs.firstVertex )
  13475. && ( transformOffset == rhs.transformOffset );
  13476. }
  13477. bool operator!=( AccelerationStructureBuildRangeInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  13478. {
  13479. return !operator==( rhs );
  13480. }
  13481. #endif
  13482. public:
  13483. uint32_t primitiveCount = {};
  13484. uint32_t primitiveOffset = {};
  13485. uint32_t firstVertex = {};
  13486. uint32_t transformOffset = {};
  13487. };
  13488. static_assert( sizeof( AccelerationStructureBuildRangeInfoKHR ) == sizeof( VkAccelerationStructureBuildRangeInfoKHR ), "struct and wrapper have different size!" );
  13489. static_assert( std::is_standard_layout<AccelerationStructureBuildRangeInfoKHR>::value, "struct wrapper is not a standard layout!" );
  13490. struct AccelerationStructureBuildSizesInfoKHR
  13491. {
  13492. static const bool allowDuplicate = false;
  13493. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureBuildSizesInfoKHR;
  13494. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13495. VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR(VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ = {}) VULKAN_HPP_NOEXCEPT
  13496. : accelerationStructureSize( accelerationStructureSize_ ), updateScratchSize( updateScratchSize_ ), buildScratchSize( buildScratchSize_ )
  13497. {}
  13498. VULKAN_HPP_CONSTEXPR AccelerationStructureBuildSizesInfoKHR( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13499. AccelerationStructureBuildSizesInfoKHR( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  13500. : AccelerationStructureBuildSizesInfoKHR( *reinterpret_cast<AccelerationStructureBuildSizesInfoKHR const *>( &rhs ) )
  13501. {}
  13502. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13503. VULKAN_HPP_CONSTEXPR_14 AccelerationStructureBuildSizesInfoKHR & operator=( AccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13504. AccelerationStructureBuildSizesInfoKHR & operator=( VkAccelerationStructureBuildSizesInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  13505. {
  13506. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR const *>( &rhs );
  13507. return *this;
  13508. }
  13509. AccelerationStructureBuildSizesInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  13510. {
  13511. pNext = pNext_;
  13512. return *this;
  13513. }
  13514. AccelerationStructureBuildSizesInfoKHR & setAccelerationStructureSize( VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize_ ) VULKAN_HPP_NOEXCEPT
  13515. {
  13516. accelerationStructureSize = accelerationStructureSize_;
  13517. return *this;
  13518. }
  13519. AccelerationStructureBuildSizesInfoKHR & setUpdateScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize_ ) VULKAN_HPP_NOEXCEPT
  13520. {
  13521. updateScratchSize = updateScratchSize_;
  13522. return *this;
  13523. }
  13524. AccelerationStructureBuildSizesInfoKHR & setBuildScratchSize( VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize_ ) VULKAN_HPP_NOEXCEPT
  13525. {
  13526. buildScratchSize = buildScratchSize_;
  13527. return *this;
  13528. }
  13529. operator VkAccelerationStructureBuildSizesInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  13530. {
  13531. return *reinterpret_cast<const VkAccelerationStructureBuildSizesInfoKHR*>( this );
  13532. }
  13533. operator VkAccelerationStructureBuildSizesInfoKHR &() VULKAN_HPP_NOEXCEPT
  13534. {
  13535. return *reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR*>( this );
  13536. }
  13537. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  13538. auto operator<=>( AccelerationStructureBuildSizesInfoKHR const& ) const = default;
  13539. #else
  13540. bool operator==( AccelerationStructureBuildSizesInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  13541. {
  13542. return ( sType == rhs.sType )
  13543. && ( pNext == rhs.pNext )
  13544. && ( accelerationStructureSize == rhs.accelerationStructureSize )
  13545. && ( updateScratchSize == rhs.updateScratchSize )
  13546. && ( buildScratchSize == rhs.buildScratchSize );
  13547. }
  13548. bool operator!=( AccelerationStructureBuildSizesInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  13549. {
  13550. return !operator==( rhs );
  13551. }
  13552. #endif
  13553. public:
  13554. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureBuildSizesInfoKHR;
  13555. const void* pNext = {};
  13556. VULKAN_HPP_NAMESPACE::DeviceSize accelerationStructureSize = {};
  13557. VULKAN_HPP_NAMESPACE::DeviceSize updateScratchSize = {};
  13558. VULKAN_HPP_NAMESPACE::DeviceSize buildScratchSize = {};
  13559. };
  13560. static_assert( sizeof( AccelerationStructureBuildSizesInfoKHR ) == sizeof( VkAccelerationStructureBuildSizesInfoKHR ), "struct and wrapper have different size!" );
  13561. static_assert( std::is_standard_layout<AccelerationStructureBuildSizesInfoKHR>::value, "struct wrapper is not a standard layout!" );
  13562. template <>
  13563. struct CppType<StructureType, StructureType::eAccelerationStructureBuildSizesInfoKHR>
  13564. {
  13565. using Type = AccelerationStructureBuildSizesInfoKHR;
  13566. };
  13567. class Buffer
  13568. {
  13569. public:
  13570. using CType = VkBuffer;
  13571. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBuffer;
  13572. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer;
  13573. public:
  13574. VULKAN_HPP_CONSTEXPR Buffer() VULKAN_HPP_NOEXCEPT
  13575. : m_buffer(VK_NULL_HANDLE)
  13576. {}
  13577. VULKAN_HPP_CONSTEXPR Buffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  13578. : m_buffer(VK_NULL_HANDLE)
  13579. {}
  13580. VULKAN_HPP_TYPESAFE_EXPLICIT Buffer( VkBuffer buffer ) VULKAN_HPP_NOEXCEPT
  13581. : m_buffer( buffer )
  13582. {}
  13583. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  13584. Buffer & operator=(VkBuffer buffer) VULKAN_HPP_NOEXCEPT
  13585. {
  13586. m_buffer = buffer;
  13587. return *this;
  13588. }
  13589. #endif
  13590. Buffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  13591. {
  13592. m_buffer = VK_NULL_HANDLE;
  13593. return *this;
  13594. }
  13595. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  13596. auto operator<=>( Buffer const& ) const = default;
  13597. #else
  13598. bool operator==( Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
  13599. {
  13600. return m_buffer == rhs.m_buffer;
  13601. }
  13602. bool operator!=(Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
  13603. {
  13604. return m_buffer != rhs.m_buffer;
  13605. }
  13606. bool operator<(Buffer const & rhs ) const VULKAN_HPP_NOEXCEPT
  13607. {
  13608. return m_buffer < rhs.m_buffer;
  13609. }
  13610. #endif
  13611. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBuffer() const VULKAN_HPP_NOEXCEPT
  13612. {
  13613. return m_buffer;
  13614. }
  13615. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  13616. {
  13617. return m_buffer != VK_NULL_HANDLE;
  13618. }
  13619. bool operator!() const VULKAN_HPP_NOEXCEPT
  13620. {
  13621. return m_buffer == VK_NULL_HANDLE;
  13622. }
  13623. private:
  13624. VkBuffer m_buffer;
  13625. };
  13626. static_assert( sizeof( VULKAN_HPP_NAMESPACE::Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" );
  13627. template <>
  13628. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eBuffer>
  13629. {
  13630. using type = VULKAN_HPP_NAMESPACE::Buffer;
  13631. };
  13632. template <>
  13633. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBuffer>
  13634. {
  13635. using Type = VULKAN_HPP_NAMESPACE::Buffer;
  13636. };
  13637. template <>
  13638. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBuffer>
  13639. {
  13640. using Type = VULKAN_HPP_NAMESPACE::Buffer;
  13641. };
  13642. template <>
  13643. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Buffer>
  13644. {
  13645. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  13646. };
  13647. struct AccelerationStructureCreateInfoKHR
  13648. {
  13649. static const bool allowDuplicate = false;
  13650. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoKHR;
  13651. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13652. VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel, VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}) VULKAN_HPP_NOEXCEPT
  13653. : createFlags( createFlags_ ), buffer( buffer_ ), offset( offset_ ), size( size_ ), type( type_ ), deviceAddress( deviceAddress_ )
  13654. {}
  13655. VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoKHR( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13656. AccelerationStructureCreateInfoKHR( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  13657. : AccelerationStructureCreateInfoKHR( *reinterpret_cast<AccelerationStructureCreateInfoKHR const *>( &rhs ) )
  13658. {}
  13659. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13660. VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoKHR & operator=( AccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13661. AccelerationStructureCreateInfoKHR & operator=( VkAccelerationStructureCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  13662. {
  13663. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR const *>( &rhs );
  13664. return *this;
  13665. }
  13666. AccelerationStructureCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  13667. {
  13668. pNext = pNext_;
  13669. return *this;
  13670. }
  13671. AccelerationStructureCreateInfoKHR & setCreateFlags( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags_ ) VULKAN_HPP_NOEXCEPT
  13672. {
  13673. createFlags = createFlags_;
  13674. return *this;
  13675. }
  13676. AccelerationStructureCreateInfoKHR & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
  13677. {
  13678. buffer = buffer_;
  13679. return *this;
  13680. }
  13681. AccelerationStructureCreateInfoKHR & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
  13682. {
  13683. offset = offset_;
  13684. return *this;
  13685. }
  13686. AccelerationStructureCreateInfoKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
  13687. {
  13688. size = size_;
  13689. return *this;
  13690. }
  13691. AccelerationStructureCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
  13692. {
  13693. type = type_;
  13694. return *this;
  13695. }
  13696. AccelerationStructureCreateInfoKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
  13697. {
  13698. deviceAddress = deviceAddress_;
  13699. return *this;
  13700. }
  13701. operator VkAccelerationStructureCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  13702. {
  13703. return *reinterpret_cast<const VkAccelerationStructureCreateInfoKHR*>( this );
  13704. }
  13705. operator VkAccelerationStructureCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
  13706. {
  13707. return *reinterpret_cast<VkAccelerationStructureCreateInfoKHR*>( this );
  13708. }
  13709. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  13710. auto operator<=>( AccelerationStructureCreateInfoKHR const& ) const = default;
  13711. #else
  13712. bool operator==( AccelerationStructureCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  13713. {
  13714. return ( sType == rhs.sType )
  13715. && ( pNext == rhs.pNext )
  13716. && ( createFlags == rhs.createFlags )
  13717. && ( buffer == rhs.buffer )
  13718. && ( offset == rhs.offset )
  13719. && ( size == rhs.size )
  13720. && ( type == rhs.type )
  13721. && ( deviceAddress == rhs.deviceAddress );
  13722. }
  13723. bool operator!=( AccelerationStructureCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  13724. {
  13725. return !operator==( rhs );
  13726. }
  13727. #endif
  13728. public:
  13729. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoKHR;
  13730. const void* pNext = {};
  13731. VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR createFlags = {};
  13732. VULKAN_HPP_NAMESPACE::Buffer buffer = {};
  13733. VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
  13734. VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  13735. VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR type = VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR::eTopLevel;
  13736. VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
  13737. };
  13738. static_assert( sizeof( AccelerationStructureCreateInfoKHR ) == sizeof( VkAccelerationStructureCreateInfoKHR ), "struct and wrapper have different size!" );
  13739. static_assert( std::is_standard_layout<AccelerationStructureCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  13740. template <>
  13741. struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoKHR>
  13742. {
  13743. using Type = AccelerationStructureCreateInfoKHR;
  13744. };
  13745. struct GeometryTrianglesNV
  13746. {
  13747. static const bool allowDuplicate = false;
  13748. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryTrianglesNV;
  13749. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13750. VULKAN_HPP_CONSTEXPR GeometryTrianglesNV(VULKAN_HPP_NAMESPACE::Buffer vertexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ = {}, uint32_t vertexCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ = {}, VULKAN_HPP_NAMESPACE::Format vertexFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Buffer indexData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ = {}, uint32_t indexCount_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16, VULKAN_HPP_NAMESPACE::Buffer transformData_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ = {}) VULKAN_HPP_NOEXCEPT
  13751. : vertexData( vertexData_ ), vertexOffset( vertexOffset_ ), vertexCount( vertexCount_ ), vertexStride( vertexStride_ ), vertexFormat( vertexFormat_ ), indexData( indexData_ ), indexOffset( indexOffset_ ), indexCount( indexCount_ ), indexType( indexType_ ), transformData( transformData_ ), transformOffset( transformOffset_ )
  13752. {}
  13753. VULKAN_HPP_CONSTEXPR GeometryTrianglesNV( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13754. GeometryTrianglesNV( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  13755. : GeometryTrianglesNV( *reinterpret_cast<GeometryTrianglesNV const *>( &rhs ) )
  13756. {}
  13757. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13758. VULKAN_HPP_CONSTEXPR_14 GeometryTrianglesNV & operator=( GeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13759. GeometryTrianglesNV & operator=( VkGeometryTrianglesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  13760. {
  13761. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const *>( &rhs );
  13762. return *this;
  13763. }
  13764. GeometryTrianglesNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  13765. {
  13766. pNext = pNext_;
  13767. return *this;
  13768. }
  13769. GeometryTrianglesNV & setVertexData( VULKAN_HPP_NAMESPACE::Buffer vertexData_ ) VULKAN_HPP_NOEXCEPT
  13770. {
  13771. vertexData = vertexData_;
  13772. return *this;
  13773. }
  13774. GeometryTrianglesNV & setVertexOffset( VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset_ ) VULKAN_HPP_NOEXCEPT
  13775. {
  13776. vertexOffset = vertexOffset_;
  13777. return *this;
  13778. }
  13779. GeometryTrianglesNV & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
  13780. {
  13781. vertexCount = vertexCount_;
  13782. return *this;
  13783. }
  13784. GeometryTrianglesNV & setVertexStride( VULKAN_HPP_NAMESPACE::DeviceSize vertexStride_ ) VULKAN_HPP_NOEXCEPT
  13785. {
  13786. vertexStride = vertexStride_;
  13787. return *this;
  13788. }
  13789. GeometryTrianglesNV & setVertexFormat( VULKAN_HPP_NAMESPACE::Format vertexFormat_ ) VULKAN_HPP_NOEXCEPT
  13790. {
  13791. vertexFormat = vertexFormat_;
  13792. return *this;
  13793. }
  13794. GeometryTrianglesNV & setIndexData( VULKAN_HPP_NAMESPACE::Buffer indexData_ ) VULKAN_HPP_NOEXCEPT
  13795. {
  13796. indexData = indexData_;
  13797. return *this;
  13798. }
  13799. GeometryTrianglesNV & setIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize indexOffset_ ) VULKAN_HPP_NOEXCEPT
  13800. {
  13801. indexOffset = indexOffset_;
  13802. return *this;
  13803. }
  13804. GeometryTrianglesNV & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
  13805. {
  13806. indexCount = indexCount_;
  13807. return *this;
  13808. }
  13809. GeometryTrianglesNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
  13810. {
  13811. indexType = indexType_;
  13812. return *this;
  13813. }
  13814. GeometryTrianglesNV & setTransformData( VULKAN_HPP_NAMESPACE::Buffer transformData_ ) VULKAN_HPP_NOEXCEPT
  13815. {
  13816. transformData = transformData_;
  13817. return *this;
  13818. }
  13819. GeometryTrianglesNV & setTransformOffset( VULKAN_HPP_NAMESPACE::DeviceSize transformOffset_ ) VULKAN_HPP_NOEXCEPT
  13820. {
  13821. transformOffset = transformOffset_;
  13822. return *this;
  13823. }
  13824. operator VkGeometryTrianglesNV const&() const VULKAN_HPP_NOEXCEPT
  13825. {
  13826. return *reinterpret_cast<const VkGeometryTrianglesNV*>( this );
  13827. }
  13828. operator VkGeometryTrianglesNV &() VULKAN_HPP_NOEXCEPT
  13829. {
  13830. return *reinterpret_cast<VkGeometryTrianglesNV*>( this );
  13831. }
  13832. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  13833. auto operator<=>( GeometryTrianglesNV const& ) const = default;
  13834. #else
  13835. bool operator==( GeometryTrianglesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  13836. {
  13837. return ( sType == rhs.sType )
  13838. && ( pNext == rhs.pNext )
  13839. && ( vertexData == rhs.vertexData )
  13840. && ( vertexOffset == rhs.vertexOffset )
  13841. && ( vertexCount == rhs.vertexCount )
  13842. && ( vertexStride == rhs.vertexStride )
  13843. && ( vertexFormat == rhs.vertexFormat )
  13844. && ( indexData == rhs.indexData )
  13845. && ( indexOffset == rhs.indexOffset )
  13846. && ( indexCount == rhs.indexCount )
  13847. && ( indexType == rhs.indexType )
  13848. && ( transformData == rhs.transformData )
  13849. && ( transformOffset == rhs.transformOffset );
  13850. }
  13851. bool operator!=( GeometryTrianglesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  13852. {
  13853. return !operator==( rhs );
  13854. }
  13855. #endif
  13856. public:
  13857. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryTrianglesNV;
  13858. const void* pNext = {};
  13859. VULKAN_HPP_NAMESPACE::Buffer vertexData = {};
  13860. VULKAN_HPP_NAMESPACE::DeviceSize vertexOffset = {};
  13861. uint32_t vertexCount = {};
  13862. VULKAN_HPP_NAMESPACE::DeviceSize vertexStride = {};
  13863. VULKAN_HPP_NAMESPACE::Format vertexFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  13864. VULKAN_HPP_NAMESPACE::Buffer indexData = {};
  13865. VULKAN_HPP_NAMESPACE::DeviceSize indexOffset = {};
  13866. uint32_t indexCount = {};
  13867. VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
  13868. VULKAN_HPP_NAMESPACE::Buffer transformData = {};
  13869. VULKAN_HPP_NAMESPACE::DeviceSize transformOffset = {};
  13870. };
  13871. static_assert( sizeof( GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), "struct and wrapper have different size!" );
  13872. static_assert( std::is_standard_layout<GeometryTrianglesNV>::value, "struct wrapper is not a standard layout!" );
  13873. template <>
  13874. struct CppType<StructureType, StructureType::eGeometryTrianglesNV>
  13875. {
  13876. using Type = GeometryTrianglesNV;
  13877. };
  13878. struct GeometryAABBNV
  13879. {
  13880. static const bool allowDuplicate = false;
  13881. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryAabbNV;
  13882. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13883. VULKAN_HPP_CONSTEXPR GeometryAABBNV(VULKAN_HPP_NAMESPACE::Buffer aabbData_ = {}, uint32_t numAABBs_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}) VULKAN_HPP_NOEXCEPT
  13884. : aabbData( aabbData_ ), numAABBs( numAABBs_ ), stride( stride_ ), offset( offset_ )
  13885. {}
  13886. VULKAN_HPP_CONSTEXPR GeometryAABBNV( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13887. GeometryAABBNV( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
  13888. : GeometryAABBNV( *reinterpret_cast<GeometryAABBNV const *>( &rhs ) )
  13889. {}
  13890. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13891. VULKAN_HPP_CONSTEXPR_14 GeometryAABBNV & operator=( GeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13892. GeometryAABBNV & operator=( VkGeometryAABBNV const & rhs ) VULKAN_HPP_NOEXCEPT
  13893. {
  13894. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryAABBNV const *>( &rhs );
  13895. return *this;
  13896. }
  13897. GeometryAABBNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  13898. {
  13899. pNext = pNext_;
  13900. return *this;
  13901. }
  13902. GeometryAABBNV & setAabbData( VULKAN_HPP_NAMESPACE::Buffer aabbData_ ) VULKAN_HPP_NOEXCEPT
  13903. {
  13904. aabbData = aabbData_;
  13905. return *this;
  13906. }
  13907. GeometryAABBNV & setNumAABBs( uint32_t numAABBs_ ) VULKAN_HPP_NOEXCEPT
  13908. {
  13909. numAABBs = numAABBs_;
  13910. return *this;
  13911. }
  13912. GeometryAABBNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
  13913. {
  13914. stride = stride_;
  13915. return *this;
  13916. }
  13917. GeometryAABBNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
  13918. {
  13919. offset = offset_;
  13920. return *this;
  13921. }
  13922. operator VkGeometryAABBNV const&() const VULKAN_HPP_NOEXCEPT
  13923. {
  13924. return *reinterpret_cast<const VkGeometryAABBNV*>( this );
  13925. }
  13926. operator VkGeometryAABBNV &() VULKAN_HPP_NOEXCEPT
  13927. {
  13928. return *reinterpret_cast<VkGeometryAABBNV*>( this );
  13929. }
  13930. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  13931. auto operator<=>( GeometryAABBNV const& ) const = default;
  13932. #else
  13933. bool operator==( GeometryAABBNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  13934. {
  13935. return ( sType == rhs.sType )
  13936. && ( pNext == rhs.pNext )
  13937. && ( aabbData == rhs.aabbData )
  13938. && ( numAABBs == rhs.numAABBs )
  13939. && ( stride == rhs.stride )
  13940. && ( offset == rhs.offset );
  13941. }
  13942. bool operator!=( GeometryAABBNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  13943. {
  13944. return !operator==( rhs );
  13945. }
  13946. #endif
  13947. public:
  13948. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryAabbNV;
  13949. const void* pNext = {};
  13950. VULKAN_HPP_NAMESPACE::Buffer aabbData = {};
  13951. uint32_t numAABBs = {};
  13952. uint32_t stride = {};
  13953. VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
  13954. };
  13955. static_assert( sizeof( GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" );
  13956. static_assert( std::is_standard_layout<GeometryAABBNV>::value, "struct wrapper is not a standard layout!" );
  13957. template <>
  13958. struct CppType<StructureType, StructureType::eGeometryAabbNV>
  13959. {
  13960. using Type = GeometryAABBNV;
  13961. };
  13962. struct GeometryDataNV
  13963. {
  13964. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13965. VULKAN_HPP_CONSTEXPR GeometryDataNV(VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles_ = {}, VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs_ = {}) VULKAN_HPP_NOEXCEPT
  13966. : triangles( triangles_ ), aabbs( aabbs_ )
  13967. {}
  13968. VULKAN_HPP_CONSTEXPR GeometryDataNV( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13969. GeometryDataNV( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
  13970. : GeometryDataNV( *reinterpret_cast<GeometryDataNV const *>( &rhs ) )
  13971. {}
  13972. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  13973. VULKAN_HPP_CONSTEXPR_14 GeometryDataNV & operator=( GeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  13974. GeometryDataNV & operator=( VkGeometryDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
  13975. {
  13976. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryDataNV const *>( &rhs );
  13977. return *this;
  13978. }
  13979. GeometryDataNV & setTriangles( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV const & triangles_ ) VULKAN_HPP_NOEXCEPT
  13980. {
  13981. triangles = triangles_;
  13982. return *this;
  13983. }
  13984. GeometryDataNV & setAabbs( VULKAN_HPP_NAMESPACE::GeometryAABBNV const & aabbs_ ) VULKAN_HPP_NOEXCEPT
  13985. {
  13986. aabbs = aabbs_;
  13987. return *this;
  13988. }
  13989. operator VkGeometryDataNV const&() const VULKAN_HPP_NOEXCEPT
  13990. {
  13991. return *reinterpret_cast<const VkGeometryDataNV*>( this );
  13992. }
  13993. operator VkGeometryDataNV &() VULKAN_HPP_NOEXCEPT
  13994. {
  13995. return *reinterpret_cast<VkGeometryDataNV*>( this );
  13996. }
  13997. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  13998. auto operator<=>( GeometryDataNV const& ) const = default;
  13999. #else
  14000. bool operator==( GeometryDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  14001. {
  14002. return ( triangles == rhs.triangles )
  14003. && ( aabbs == rhs.aabbs );
  14004. }
  14005. bool operator!=( GeometryDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  14006. {
  14007. return !operator==( rhs );
  14008. }
  14009. #endif
  14010. public:
  14011. VULKAN_HPP_NAMESPACE::GeometryTrianglesNV triangles = {};
  14012. VULKAN_HPP_NAMESPACE::GeometryAABBNV aabbs = {};
  14013. };
  14014. static_assert( sizeof( GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" );
  14015. static_assert( std::is_standard_layout<GeometryDataNV>::value, "struct wrapper is not a standard layout!" );
  14016. struct GeometryNV
  14017. {
  14018. static const bool allowDuplicate = false;
  14019. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeometryNV;
  14020. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  14021. VULKAN_HPP_CONSTEXPR GeometryNV(VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles, VULKAN_HPP_NAMESPACE::GeometryDataNV geometry_ = {}, VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ = {}) VULKAN_HPP_NOEXCEPT
  14022. : geometryType( geometryType_ ), geometry( geometry_ ), flags( flags_ )
  14023. {}
  14024. VULKAN_HPP_CONSTEXPR GeometryNV( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  14025. GeometryNV( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
  14026. : GeometryNV( *reinterpret_cast<GeometryNV const *>( &rhs ) )
  14027. {}
  14028. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  14029. VULKAN_HPP_CONSTEXPR_14 GeometryNV & operator=( GeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  14030. GeometryNV & operator=( VkGeometryNV const & rhs ) VULKAN_HPP_NOEXCEPT
  14031. {
  14032. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeometryNV const *>( &rhs );
  14033. return *this;
  14034. }
  14035. GeometryNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  14036. {
  14037. pNext = pNext_;
  14038. return *this;
  14039. }
  14040. GeometryNV & setGeometryType( VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType_ ) VULKAN_HPP_NOEXCEPT
  14041. {
  14042. geometryType = geometryType_;
  14043. return *this;
  14044. }
  14045. GeometryNV & setGeometry( VULKAN_HPP_NAMESPACE::GeometryDataNV const & geometry_ ) VULKAN_HPP_NOEXCEPT
  14046. {
  14047. geometry = geometry_;
  14048. return *this;
  14049. }
  14050. GeometryNV & setFlags( VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
  14051. {
  14052. flags = flags_;
  14053. return *this;
  14054. }
  14055. operator VkGeometryNV const&() const VULKAN_HPP_NOEXCEPT
  14056. {
  14057. return *reinterpret_cast<const VkGeometryNV*>( this );
  14058. }
  14059. operator VkGeometryNV &() VULKAN_HPP_NOEXCEPT
  14060. {
  14061. return *reinterpret_cast<VkGeometryNV*>( this );
  14062. }
  14063. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  14064. auto operator<=>( GeometryNV const& ) const = default;
  14065. #else
  14066. bool operator==( GeometryNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  14067. {
  14068. return ( sType == rhs.sType )
  14069. && ( pNext == rhs.pNext )
  14070. && ( geometryType == rhs.geometryType )
  14071. && ( geometry == rhs.geometry )
  14072. && ( flags == rhs.flags );
  14073. }
  14074. bool operator!=( GeometryNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  14075. {
  14076. return !operator==( rhs );
  14077. }
  14078. #endif
  14079. public:
  14080. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeometryNV;
  14081. const void* pNext = {};
  14082. VULKAN_HPP_NAMESPACE::GeometryTypeKHR geometryType = VULKAN_HPP_NAMESPACE::GeometryTypeKHR::eTriangles;
  14083. VULKAN_HPP_NAMESPACE::GeometryDataNV geometry = {};
  14084. VULKAN_HPP_NAMESPACE::GeometryFlagsKHR flags = {};
  14085. };
  14086. static_assert( sizeof( GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" );
  14087. static_assert( std::is_standard_layout<GeometryNV>::value, "struct wrapper is not a standard layout!" );
  14088. template <>
  14089. struct CppType<StructureType, StructureType::eGeometryNV>
  14090. {
  14091. using Type = GeometryNV;
  14092. };
  14093. struct AccelerationStructureInfoNV
  14094. {
  14095. static const bool allowDuplicate = false;
  14096. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureInfoNV;
  14097. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  14098. VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ = {}, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ = {}, uint32_t instanceCount_ = {}, uint32_t geometryCount_ = {}, const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ = {}) VULKAN_HPP_NOEXCEPT
  14099. : type( type_ ), flags( flags_ ), instanceCount( instanceCount_ ), geometryCount( geometryCount_ ), pGeometries( pGeometries_ )
  14100. {}
  14101. VULKAN_HPP_CONSTEXPR AccelerationStructureInfoNV( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  14102. AccelerationStructureInfoNV( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  14103. : AccelerationStructureInfoNV( *reinterpret_cast<AccelerationStructureInfoNV const *>( &rhs ) )
  14104. {}
  14105. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  14106. AccelerationStructureInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_, VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_, uint32_t instanceCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_ )
  14107. : type( type_ ), flags( flags_ ), instanceCount( instanceCount_ ), geometryCount( static_cast<uint32_t>( geometries_.size() ) ), pGeometries( geometries_.data() )
  14108. {}
  14109. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  14110. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  14111. VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInfoNV & operator=( AccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  14112. AccelerationStructureInfoNV & operator=( VkAccelerationStructureInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  14113. {
  14114. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const *>( &rhs );
  14115. return *this;
  14116. }
  14117. AccelerationStructureInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  14118. {
  14119. pNext = pNext_;
  14120. return *this;
  14121. }
  14122. AccelerationStructureInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type_ ) VULKAN_HPP_NOEXCEPT
  14123. {
  14124. type = type_;
  14125. return *this;
  14126. }
  14127. AccelerationStructureInfoNV & setFlags( VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
  14128. {
  14129. flags = flags_;
  14130. return *this;
  14131. }
  14132. AccelerationStructureInfoNV & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
  14133. {
  14134. instanceCount = instanceCount_;
  14135. return *this;
  14136. }
  14137. AccelerationStructureInfoNV & setGeometryCount( uint32_t geometryCount_ ) VULKAN_HPP_NOEXCEPT
  14138. {
  14139. geometryCount = geometryCount_;
  14140. return *this;
  14141. }
  14142. AccelerationStructureInfoNV & setPGeometries( const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries_ ) VULKAN_HPP_NOEXCEPT
  14143. {
  14144. pGeometries = pGeometries_;
  14145. return *this;
  14146. }
  14147. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  14148. AccelerationStructureInfoNV & setGeometries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GeometryNV> const & geometries_ ) VULKAN_HPP_NOEXCEPT
  14149. {
  14150. geometryCount = static_cast<uint32_t>( geometries_.size() );
  14151. pGeometries = geometries_.data();
  14152. return *this;
  14153. }
  14154. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  14155. operator VkAccelerationStructureInfoNV const&() const VULKAN_HPP_NOEXCEPT
  14156. {
  14157. return *reinterpret_cast<const VkAccelerationStructureInfoNV*>( this );
  14158. }
  14159. operator VkAccelerationStructureInfoNV &() VULKAN_HPP_NOEXCEPT
  14160. {
  14161. return *reinterpret_cast<VkAccelerationStructureInfoNV*>( this );
  14162. }
  14163. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  14164. auto operator<=>( AccelerationStructureInfoNV const& ) const = default;
  14165. #else
  14166. bool operator==( AccelerationStructureInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  14167. {
  14168. return ( sType == rhs.sType )
  14169. && ( pNext == rhs.pNext )
  14170. && ( type == rhs.type )
  14171. && ( flags == rhs.flags )
  14172. && ( instanceCount == rhs.instanceCount )
  14173. && ( geometryCount == rhs.geometryCount )
  14174. && ( pGeometries == rhs.pGeometries );
  14175. }
  14176. bool operator!=( AccelerationStructureInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  14177. {
  14178. return !operator==( rhs );
  14179. }
  14180. #endif
  14181. public:
  14182. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureInfoNV;
  14183. const void* pNext = {};
  14184. VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV type = {};
  14185. VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsNV flags = {};
  14186. uint32_t instanceCount = {};
  14187. uint32_t geometryCount = {};
  14188. const VULKAN_HPP_NAMESPACE::GeometryNV* pGeometries = {};
  14189. };
  14190. static_assert( sizeof( AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ), "struct and wrapper have different size!" );
  14191. static_assert( std::is_standard_layout<AccelerationStructureInfoNV>::value, "struct wrapper is not a standard layout!" );
  14192. template <>
  14193. struct CppType<StructureType, StructureType::eAccelerationStructureInfoNV>
  14194. {
  14195. using Type = AccelerationStructureInfoNV;
  14196. };
  14197. struct AccelerationStructureCreateInfoNV
  14198. {
  14199. static const bool allowDuplicate = false;
  14200. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureCreateInfoNV;
  14201. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  14202. VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV(VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info_ = {}) VULKAN_HPP_NOEXCEPT
  14203. : compactedSize( compactedSize_ ), info( info_ )
  14204. {}
  14205. VULKAN_HPP_CONSTEXPR AccelerationStructureCreateInfoNV( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  14206. AccelerationStructureCreateInfoNV( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  14207. : AccelerationStructureCreateInfoNV( *reinterpret_cast<AccelerationStructureCreateInfoNV const *>( &rhs ) )
  14208. {}
  14209. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  14210. VULKAN_HPP_CONSTEXPR_14 AccelerationStructureCreateInfoNV & operator=( AccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  14211. AccelerationStructureCreateInfoNV & operator=( VkAccelerationStructureCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  14212. {
  14213. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV const *>( &rhs );
  14214. return *this;
  14215. }
  14216. AccelerationStructureCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  14217. {
  14218. pNext = pNext_;
  14219. return *this;
  14220. }
  14221. AccelerationStructureCreateInfoNV & setCompactedSize( VULKAN_HPP_NAMESPACE::DeviceSize compactedSize_ ) VULKAN_HPP_NOEXCEPT
  14222. {
  14223. compactedSize = compactedSize_;
  14224. return *this;
  14225. }
  14226. AccelerationStructureCreateInfoNV & setInfo( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV const & info_ ) VULKAN_HPP_NOEXCEPT
  14227. {
  14228. info = info_;
  14229. return *this;
  14230. }
  14231. operator VkAccelerationStructureCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
  14232. {
  14233. return *reinterpret_cast<const VkAccelerationStructureCreateInfoNV*>( this );
  14234. }
  14235. operator VkAccelerationStructureCreateInfoNV &() VULKAN_HPP_NOEXCEPT
  14236. {
  14237. return *reinterpret_cast<VkAccelerationStructureCreateInfoNV*>( this );
  14238. }
  14239. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  14240. auto operator<=>( AccelerationStructureCreateInfoNV const& ) const = default;
  14241. #else
  14242. bool operator==( AccelerationStructureCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  14243. {
  14244. return ( sType == rhs.sType )
  14245. && ( pNext == rhs.pNext )
  14246. && ( compactedSize == rhs.compactedSize )
  14247. && ( info == rhs.info );
  14248. }
  14249. bool operator!=( AccelerationStructureCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  14250. {
  14251. return !operator==( rhs );
  14252. }
  14253. #endif
  14254. public:
  14255. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureCreateInfoNV;
  14256. const void* pNext = {};
  14257. VULKAN_HPP_NAMESPACE::DeviceSize compactedSize = {};
  14258. VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV info = {};
  14259. };
  14260. static_assert( sizeof( AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ), "struct and wrapper have different size!" );
  14261. static_assert( std::is_standard_layout<AccelerationStructureCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  14262. template <>
  14263. struct CppType<StructureType, StructureType::eAccelerationStructureCreateInfoNV>
  14264. {
  14265. using Type = AccelerationStructureCreateInfoNV;
  14266. };
  14267. struct AccelerationStructureDeviceAddressInfoKHR
  14268. {
  14269. static const bool allowDuplicate = false;
  14270. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureDeviceAddressInfoKHR;
  14271. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  14272. VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ = {}) VULKAN_HPP_NOEXCEPT
  14273. : accelerationStructure( accelerationStructure_ )
  14274. {}
  14275. VULKAN_HPP_CONSTEXPR AccelerationStructureDeviceAddressInfoKHR( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  14276. AccelerationStructureDeviceAddressInfoKHR( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  14277. : AccelerationStructureDeviceAddressInfoKHR( *reinterpret_cast<AccelerationStructureDeviceAddressInfoKHR const *>( &rhs ) )
  14278. {}
  14279. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  14280. VULKAN_HPP_CONSTEXPR_14 AccelerationStructureDeviceAddressInfoKHR & operator=( AccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  14281. AccelerationStructureDeviceAddressInfoKHR & operator=( VkAccelerationStructureDeviceAddressInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  14282. {
  14283. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR const *>( &rhs );
  14284. return *this;
  14285. }
  14286. AccelerationStructureDeviceAddressInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  14287. {
  14288. pNext = pNext_;
  14289. return *this;
  14290. }
  14291. AccelerationStructureDeviceAddressInfoKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
  14292. {
  14293. accelerationStructure = accelerationStructure_;
  14294. return *this;
  14295. }
  14296. operator VkAccelerationStructureDeviceAddressInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  14297. {
  14298. return *reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR*>( this );
  14299. }
  14300. operator VkAccelerationStructureDeviceAddressInfoKHR &() VULKAN_HPP_NOEXCEPT
  14301. {
  14302. return *reinterpret_cast<VkAccelerationStructureDeviceAddressInfoKHR*>( this );
  14303. }
  14304. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  14305. auto operator<=>( AccelerationStructureDeviceAddressInfoKHR const& ) const = default;
  14306. #else
  14307. bool operator==( AccelerationStructureDeviceAddressInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  14308. {
  14309. return ( sType == rhs.sType )
  14310. && ( pNext == rhs.pNext )
  14311. && ( accelerationStructure == rhs.accelerationStructure );
  14312. }
  14313. bool operator!=( AccelerationStructureDeviceAddressInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  14314. {
  14315. return !operator==( rhs );
  14316. }
  14317. #endif
  14318. public:
  14319. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureDeviceAddressInfoKHR;
  14320. const void* pNext = {};
  14321. VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure = {};
  14322. };
  14323. static_assert( sizeof( AccelerationStructureDeviceAddressInfoKHR ) == sizeof( VkAccelerationStructureDeviceAddressInfoKHR ), "struct and wrapper have different size!" );
  14324. static_assert( std::is_standard_layout<AccelerationStructureDeviceAddressInfoKHR>::value, "struct wrapper is not a standard layout!" );
  14325. template <>
  14326. struct CppType<StructureType, StructureType::eAccelerationStructureDeviceAddressInfoKHR>
  14327. {
  14328. using Type = AccelerationStructureDeviceAddressInfoKHR;
  14329. };
  14330. struct TransformMatrixKHR
  14331. {
  14332. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  14333. VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR(std::array<std::array<float,4>,3> const& matrix_ = {}) VULKAN_HPP_NOEXCEPT
  14334. : matrix( matrix_ )
  14335. {}
  14336. VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  14337. TransformMatrixKHR( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  14338. : TransformMatrixKHR( *reinterpret_cast<TransformMatrixKHR const *>( &rhs ) )
  14339. {}
  14340. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  14341. VULKAN_HPP_CONSTEXPR_14 TransformMatrixKHR & operator=( TransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  14342. TransformMatrixKHR & operator=( VkTransformMatrixKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  14343. {
  14344. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TransformMatrixKHR const *>( &rhs );
  14345. return *this;
  14346. }
  14347. TransformMatrixKHR & setMatrix( std::array<std::array<float,4>,3> matrix_ ) VULKAN_HPP_NOEXCEPT
  14348. {
  14349. matrix = matrix_;
  14350. return *this;
  14351. }
  14352. operator VkTransformMatrixKHR const&() const VULKAN_HPP_NOEXCEPT
  14353. {
  14354. return *reinterpret_cast<const VkTransformMatrixKHR*>( this );
  14355. }
  14356. operator VkTransformMatrixKHR &() VULKAN_HPP_NOEXCEPT
  14357. {
  14358. return *reinterpret_cast<VkTransformMatrixKHR*>( this );
  14359. }
  14360. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  14361. auto operator<=>( TransformMatrixKHR const& ) const = default;
  14362. #else
  14363. bool operator==( TransformMatrixKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  14364. {
  14365. return ( matrix == rhs.matrix );
  14366. }
  14367. bool operator!=( TransformMatrixKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  14368. {
  14369. return !operator==( rhs );
  14370. }
  14371. #endif
  14372. public:
  14373. VULKAN_HPP_NAMESPACE::ArrayWrapper2D<float, 3, 4> matrix = {};
  14374. };
  14375. static_assert( sizeof( TransformMatrixKHR ) == sizeof( VkTransformMatrixKHR ), "struct and wrapper have different size!" );
  14376. static_assert( std::is_standard_layout<TransformMatrixKHR>::value, "struct wrapper is not a standard layout!" );
  14377. using TransformMatrixNV = TransformMatrixKHR;
  14378. struct AccelerationStructureInstanceKHR
  14379. {
  14380. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  14381. VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR(VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform_ = {}, uint32_t instanceCustomIndex_ = {}, uint32_t mask_ = {}, uint32_t instanceShaderBindingTableRecordOffset_ = {}, VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ = {}, uint64_t accelerationStructureReference_ = {}) VULKAN_HPP_NOEXCEPT
  14382. : transform( transform_ ), instanceCustomIndex( instanceCustomIndex_ ), mask( mask_ ), instanceShaderBindingTableRecordOffset( instanceShaderBindingTableRecordOffset_ ), flags( flags_ ), accelerationStructureReference( accelerationStructureReference_ )
  14383. {}
  14384. VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  14385. AccelerationStructureInstanceKHR( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  14386. : AccelerationStructureInstanceKHR( *reinterpret_cast<AccelerationStructureInstanceKHR const *>( &rhs ) )
  14387. {}
  14388. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  14389. VULKAN_HPP_CONSTEXPR_14 AccelerationStructureInstanceKHR & operator=( AccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  14390. AccelerationStructureInstanceKHR & operator=( VkAccelerationStructureInstanceKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  14391. {
  14392. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR const *>( &rhs );
  14393. return *this;
  14394. }
  14395. AccelerationStructureInstanceKHR & setTransform( VULKAN_HPP_NAMESPACE::TransformMatrixKHR const & transform_ ) VULKAN_HPP_NOEXCEPT
  14396. {
  14397. transform = transform_;
  14398. return *this;
  14399. }
  14400. AccelerationStructureInstanceKHR & setInstanceCustomIndex( uint32_t instanceCustomIndex_ ) VULKAN_HPP_NOEXCEPT
  14401. {
  14402. instanceCustomIndex = instanceCustomIndex_;
  14403. return *this;
  14404. }
  14405. AccelerationStructureInstanceKHR & setMask( uint32_t mask_ ) VULKAN_HPP_NOEXCEPT
  14406. {
  14407. mask = mask_;
  14408. return *this;
  14409. }
  14410. AccelerationStructureInstanceKHR & setInstanceShaderBindingTableRecordOffset( uint32_t instanceShaderBindingTableRecordOffset_ ) VULKAN_HPP_NOEXCEPT
  14411. {
  14412. instanceShaderBindingTableRecordOffset = instanceShaderBindingTableRecordOffset_;
  14413. return *this;
  14414. }
  14415. AccelerationStructureInstanceKHR & setFlags( VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
  14416. {
  14417. flags = *reinterpret_cast<VkGeometryInstanceFlagsKHR*>(&flags_);
  14418. return *this;
  14419. }
  14420. AccelerationStructureInstanceKHR & setAccelerationStructureReference( uint64_t accelerationStructureReference_ ) VULKAN_HPP_NOEXCEPT
  14421. {
  14422. accelerationStructureReference = accelerationStructureReference_;
  14423. return *this;
  14424. }
  14425. operator VkAccelerationStructureInstanceKHR const&() const VULKAN_HPP_NOEXCEPT
  14426. {
  14427. return *reinterpret_cast<const VkAccelerationStructureInstanceKHR*>( this );
  14428. }
  14429. operator VkAccelerationStructureInstanceKHR &() VULKAN_HPP_NOEXCEPT
  14430. {
  14431. return *reinterpret_cast<VkAccelerationStructureInstanceKHR*>( this );
  14432. }
  14433. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  14434. auto operator<=>( AccelerationStructureInstanceKHR const& ) const = default;
  14435. #else
  14436. bool operator==( AccelerationStructureInstanceKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  14437. {
  14438. return ( transform == rhs.transform )
  14439. && ( instanceCustomIndex == rhs.instanceCustomIndex )
  14440. && ( mask == rhs.mask )
  14441. && ( instanceShaderBindingTableRecordOffset == rhs.instanceShaderBindingTableRecordOffset )
  14442. && ( flags == rhs.flags )
  14443. && ( accelerationStructureReference == rhs.accelerationStructureReference );
  14444. }
  14445. bool operator!=( AccelerationStructureInstanceKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  14446. {
  14447. return !operator==( rhs );
  14448. }
  14449. #endif
  14450. public:
  14451. VULKAN_HPP_NAMESPACE::TransformMatrixKHR transform = {};
  14452. uint32_t instanceCustomIndex : 24;
  14453. uint32_t mask : 8;
  14454. uint32_t instanceShaderBindingTableRecordOffset : 24;
  14455. VkGeometryInstanceFlagsKHR flags : 8;
  14456. uint64_t accelerationStructureReference = {};
  14457. };
  14458. static_assert( sizeof( AccelerationStructureInstanceKHR ) == sizeof( VkAccelerationStructureInstanceKHR ), "struct and wrapper have different size!" );
  14459. static_assert( std::is_standard_layout<AccelerationStructureInstanceKHR>::value, "struct wrapper is not a standard layout!" );
  14460. using AccelerationStructureInstanceNV = AccelerationStructureInstanceKHR;
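// --- Illustrative usage (editorial note, not part of the generated header) ---
// A minimal sketch, assuming the default `vk` namespace, a TransformMatrixKHR
// `xform` and a 64-bit BLAS reference `blasReference` (device address or handle)
// obtained elsewhere; both names are hypothetical. The chained setters above
// populate one instance record for a top-level acceleration structure build:
//
//   vk::AccelerationStructureInstanceKHR instance;
//   instance.setTransform( xform )
//           .setInstanceCustomIndex( 0 )
//           .setMask( 0xFF )                                 // visible to all ray masks
//           .setInstanceShaderBindingTableRecordOffset( 0 )
//           .setFlags( vk::GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable )
//           .setAccelerationStructureReference( blasReference );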
  14461. class AccelerationStructureNV
  14462. {
  14463. public:
  14464. using CType = VkAccelerationStructureNV;
  14465. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV;
  14466. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV;
  14467. public:
  14468. VULKAN_HPP_CONSTEXPR AccelerationStructureNV() VULKAN_HPP_NOEXCEPT
  14469. : m_accelerationStructureNV(VK_NULL_HANDLE)
  14470. {}
  14471. VULKAN_HPP_CONSTEXPR AccelerationStructureNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  14472. : m_accelerationStructureNV(VK_NULL_HANDLE)
  14473. {}
  14474. VULKAN_HPP_TYPESAFE_EXPLICIT AccelerationStructureNV( VkAccelerationStructureNV accelerationStructureNV ) VULKAN_HPP_NOEXCEPT
  14475. : m_accelerationStructureNV( accelerationStructureNV )
  14476. {}
  14477. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  14478. AccelerationStructureNV & operator=(VkAccelerationStructureNV accelerationStructureNV) VULKAN_HPP_NOEXCEPT
  14479. {
  14480. m_accelerationStructureNV = accelerationStructureNV;
  14481. return *this;
  14482. }
  14483. #endif
  14484. AccelerationStructureNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  14485. {
  14486. m_accelerationStructureNV = VK_NULL_HANDLE;
  14487. return *this;
  14488. }
  14489. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  14490. auto operator<=>( AccelerationStructureNV const& ) const = default;
  14491. #else
  14492. bool operator==( AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
  14493. {
  14494. return m_accelerationStructureNV == rhs.m_accelerationStructureNV;
  14495. }
  14496. bool operator!=(AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
  14497. {
  14498. return m_accelerationStructureNV != rhs.m_accelerationStructureNV;
  14499. }
  14500. bool operator<(AccelerationStructureNV const & rhs ) const VULKAN_HPP_NOEXCEPT
  14501. {
  14502. return m_accelerationStructureNV < rhs.m_accelerationStructureNV;
  14503. }
  14504. #endif
  14505. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkAccelerationStructureNV() const VULKAN_HPP_NOEXCEPT
  14506. {
  14507. return m_accelerationStructureNV;
  14508. }
  14509. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  14510. {
  14511. return m_accelerationStructureNV != VK_NULL_HANDLE;
  14512. }
  14513. bool operator!() const VULKAN_HPP_NOEXCEPT
  14514. {
  14515. return m_accelerationStructureNV == VK_NULL_HANDLE;
  14516. }
  14517. private:
  14518. VkAccelerationStructureNV m_accelerationStructureNV;
  14519. };
  14520. static_assert( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureNV ) == sizeof( VkAccelerationStructureNV ), "handle and wrapper have different size!" );
  14521. template <>
  14522. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eAccelerationStructureNV>
  14523. {
  14524. using type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV;
  14525. };
  14526. template <>
  14527. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eAccelerationStructureNV>
  14528. {
  14529. using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV;
  14530. };
  14531. template <>
  14532. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eAccelerationStructureNV>
  14533. {
  14534. using Type = VULKAN_HPP_NAMESPACE::AccelerationStructureNV;
  14535. };
  14536. template <>
  14537. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>
  14538. {
  14539. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  14540. };
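// --- Illustrative usage (editorial note, not part of the generated header) ---
// AccelerationStructureNV follows the same non-owning handle pattern as the
// other handle wrappers below (SwapchainKHR, Semaphore, Fence, ...): a
// trivially copyable value type that stores only the Vk handle. A minimal
// sketch, assuming the default `vk` namespace and a hypothetical raw
// VkAccelerationStructureNV `someRawHandle`:
//
//   vk::AccelerationStructureNV as;      // VK_NULL_HANDLE by default
//   assert( !as );                       // explicit operator bool tests for null
//   as = someRawHandle;                  // plain assignment only if
//                                        // VULKAN_HPP_TYPESAFE_CONVERSION is defined
//   if ( as ) { /* non-null; destroying the object is still the caller's job */ }
//   as = nullptr;                        // reset to VK_NULL_HANDLE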
  14541. struct AccelerationStructureMemoryRequirementsInfoNV
  14542. {
  14543. static const bool allowDuplicate = false;
  14544. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
  14545. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  14546. VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject, VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}) VULKAN_HPP_NOEXCEPT
  14547. : type( type_ ), accelerationStructure( accelerationStructure_ )
  14548. {}
  14549. VULKAN_HPP_CONSTEXPR AccelerationStructureMemoryRequirementsInfoNV( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  14550. AccelerationStructureMemoryRequirementsInfoNV( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  14551. : AccelerationStructureMemoryRequirementsInfoNV( *reinterpret_cast<AccelerationStructureMemoryRequirementsInfoNV const *>( &rhs ) )
  14552. {}
  14553. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  14554. VULKAN_HPP_CONSTEXPR_14 AccelerationStructureMemoryRequirementsInfoNV & operator=( AccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  14555. AccelerationStructureMemoryRequirementsInfoNV & operator=( VkAccelerationStructureMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  14556. {
  14557. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV const *>( &rhs );
  14558. return *this;
  14559. }
  14560. AccelerationStructureMemoryRequirementsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  14561. {
  14562. pNext = pNext_;
  14563. return *this;
  14564. }
  14565. AccelerationStructureMemoryRequirementsInfoNV & setType( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type_ ) VULKAN_HPP_NOEXCEPT
  14566. {
  14567. type = type_;
  14568. return *this;
  14569. }
  14570. AccelerationStructureMemoryRequirementsInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
  14571. {
  14572. accelerationStructure = accelerationStructure_;
  14573. return *this;
  14574. }
  14575. operator VkAccelerationStructureMemoryRequirementsInfoNV const&() const VULKAN_HPP_NOEXCEPT
  14576. {
  14577. return *reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
  14578. }
  14579. operator VkAccelerationStructureMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
  14580. {
  14581. return *reinterpret_cast<VkAccelerationStructureMemoryRequirementsInfoNV*>( this );
  14582. }
  14583. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  14584. auto operator<=>( AccelerationStructureMemoryRequirementsInfoNV const& ) const = default;
  14585. #else
  14586. bool operator==( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  14587. {
  14588. return ( sType == rhs.sType )
  14589. && ( pNext == rhs.pNext )
  14590. && ( type == rhs.type )
  14591. && ( accelerationStructure == rhs.accelerationStructure );
  14592. }
  14593. bool operator!=( AccelerationStructureMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  14594. {
  14595. return !operator==( rhs );
  14596. }
  14597. #endif
  14598. public:
  14599. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureMemoryRequirementsInfoNV;
  14600. const void* pNext = {};
  14601. VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eObject;
  14602. VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
  14603. };
  14604. static_assert( sizeof( AccelerationStructureMemoryRequirementsInfoNV ) == sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ), "struct and wrapper have different size!" );
  14605. static_assert( std::is_standard_layout<AccelerationStructureMemoryRequirementsInfoNV>::value, "struct wrapper is not a standard layout!" );
  14606. template <>
  14607. struct CppType<StructureType, StructureType::eAccelerationStructureMemoryRequirementsInfoNV>
  14608. {
  14609. using Type = AccelerationStructureMemoryRequirementsInfoNV;
  14610. };
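// --- Illustrative usage (editorial note, not part of the generated header) ---
// A minimal sketch, assuming the default `vk` namespace, a hypothetical
// vk::Device `device` created with VK_NV_ray_tracing enabled and an existing
// handle `accel`. sType and pNext are prefilled, so only the query type and
// the handle need to be set before passing the struct to the memory
// requirements query:
//
//   auto info = vk::AccelerationStructureMemoryRequirementsInfoNV{}
//                   .setType( vk::AccelerationStructureMemoryRequirementsTypeNV::eObject )
//                   .setAccelerationStructure( accel );
//   auto reqs = device.getAccelerationStructureMemoryRequirementsNV( info );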
  14611. struct AccelerationStructureVersionInfoKHR
  14612. {
  14613. static const bool allowDuplicate = false;
  14614. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAccelerationStructureVersionInfoKHR;
  14615. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  14616. VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR(const uint8_t* pVersionData_ = {}) VULKAN_HPP_NOEXCEPT
  14617. : pVersionData( pVersionData_ )
  14618. {}
  14619. VULKAN_HPP_CONSTEXPR AccelerationStructureVersionInfoKHR( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  14620. AccelerationStructureVersionInfoKHR( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  14621. : AccelerationStructureVersionInfoKHR( *reinterpret_cast<AccelerationStructureVersionInfoKHR const *>( &rhs ) )
  14622. {}
  14623. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  14624. VULKAN_HPP_CONSTEXPR_14 AccelerationStructureVersionInfoKHR & operator=( AccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  14625. AccelerationStructureVersionInfoKHR & operator=( VkAccelerationStructureVersionInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  14626. {
  14627. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR const *>( &rhs );
  14628. return *this;
  14629. }
  14630. AccelerationStructureVersionInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  14631. {
  14632. pNext = pNext_;
  14633. return *this;
  14634. }
  14635. AccelerationStructureVersionInfoKHR & setPVersionData( const uint8_t* pVersionData_ ) VULKAN_HPP_NOEXCEPT
  14636. {
  14637. pVersionData = pVersionData_;
  14638. return *this;
  14639. }
  14640. operator VkAccelerationStructureVersionInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  14641. {
  14642. return *reinterpret_cast<const VkAccelerationStructureVersionInfoKHR*>( this );
  14643. }
  14644. operator VkAccelerationStructureVersionInfoKHR &() VULKAN_HPP_NOEXCEPT
  14645. {
  14646. return *reinterpret_cast<VkAccelerationStructureVersionInfoKHR*>( this );
  14647. }
  14648. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  14649. auto operator<=>( AccelerationStructureVersionInfoKHR const& ) const = default;
  14650. #else
  14651. bool operator==( AccelerationStructureVersionInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  14652. {
  14653. return ( sType == rhs.sType )
  14654. && ( pNext == rhs.pNext )
  14655. && ( pVersionData == rhs.pVersionData );
  14656. }
  14657. bool operator!=( AccelerationStructureVersionInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  14658. {
  14659. return !operator==( rhs );
  14660. }
  14661. #endif
  14662. public:
  14663. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAccelerationStructureVersionInfoKHR;
  14664. const void* pNext = {};
  14665. const uint8_t* pVersionData = {};
  14666. };
  14667. static_assert( sizeof( AccelerationStructureVersionInfoKHR ) == sizeof( VkAccelerationStructureVersionInfoKHR ), "struct and wrapper have different size!" );
  14668. static_assert( std::is_standard_layout<AccelerationStructureVersionInfoKHR>::value, "struct wrapper is not a standard layout!" );
  14669. template <>
  14670. struct CppType<StructureType, StructureType::eAccelerationStructureVersionInfoKHR>
  14671. {
  14672. using Type = AccelerationStructureVersionInfoKHR;
  14673. };
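// --- Illustrative usage (editorial note, not part of the generated header) ---
// A minimal sketch; per VK_KHR_acceleration_structure, pVersionData points to
// 2 * VK_UUID_SIZE bytes of version information taken from the header of a
// serialized acceleration structure. `serializedHeader` is a hypothetical
// byte container holding that data:
//
//   vk::AccelerationStructureVersionInfoKHR versionInfo;
//   versionInfo.setPVersionData( serializedHeader.data() );   // 2 * VK_UUID_SIZE bytes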
  14674. class SwapchainKHR
  14675. {
  14676. public:
  14677. using CType = VkSwapchainKHR;
  14678. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR;
  14679. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR;
  14680. public:
  14681. VULKAN_HPP_CONSTEXPR SwapchainKHR() VULKAN_HPP_NOEXCEPT
  14682. : m_swapchainKHR(VK_NULL_HANDLE)
  14683. {}
  14684. VULKAN_HPP_CONSTEXPR SwapchainKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  14685. : m_swapchainKHR(VK_NULL_HANDLE)
  14686. {}
  14687. VULKAN_HPP_TYPESAFE_EXPLICIT SwapchainKHR( VkSwapchainKHR swapchainKHR ) VULKAN_HPP_NOEXCEPT
  14688. : m_swapchainKHR( swapchainKHR )
  14689. {}
  14690. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  14691. SwapchainKHR & operator=(VkSwapchainKHR swapchainKHR) VULKAN_HPP_NOEXCEPT
  14692. {
  14693. m_swapchainKHR = swapchainKHR;
  14694. return *this;
  14695. }
  14696. #endif
  14697. SwapchainKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  14698. {
  14699. m_swapchainKHR = VK_NULL_HANDLE;
  14700. return *this;
  14701. }
  14702. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  14703. auto operator<=>( SwapchainKHR const& ) const = default;
  14704. #else
  14705. bool operator==( SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
  14706. {
  14707. return m_swapchainKHR == rhs.m_swapchainKHR;
  14708. }
  14709. bool operator!=(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
  14710. {
  14711. return m_swapchainKHR != rhs.m_swapchainKHR;
  14712. }
  14713. bool operator<(SwapchainKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
  14714. {
  14715. return m_swapchainKHR < rhs.m_swapchainKHR;
  14716. }
  14717. #endif
  14718. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSwapchainKHR() const VULKAN_HPP_NOEXCEPT
  14719. {
  14720. return m_swapchainKHR;
  14721. }
  14722. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  14723. {
  14724. return m_swapchainKHR != VK_NULL_HANDLE;
  14725. }
  14726. bool operator!() const VULKAN_HPP_NOEXCEPT
  14727. {
  14728. return m_swapchainKHR == VK_NULL_HANDLE;
  14729. }
  14730. private:
  14731. VkSwapchainKHR m_swapchainKHR;
  14732. };
  14733. static_assert( sizeof( VULKAN_HPP_NAMESPACE::SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" );
  14734. template <>
  14735. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eSwapchainKHR>
  14736. {
  14737. using type = VULKAN_HPP_NAMESPACE::SwapchainKHR;
  14738. };
  14739. template <>
  14740. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSwapchainKHR>
  14741. {
  14742. using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR;
  14743. };
  14744. template <>
  14745. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSwapchainKHR>
  14746. {
  14747. using Type = VULKAN_HPP_NAMESPACE::SwapchainKHR;
  14748. };
  14749. template <>
  14750. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::SwapchainKHR>
  14751. {
  14752. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  14753. };
  14754. class Semaphore
  14755. {
  14756. public:
  14757. using CType = VkSemaphore;
  14758. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore;
  14759. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore;
  14760. public:
  14761. VULKAN_HPP_CONSTEXPR Semaphore() VULKAN_HPP_NOEXCEPT
  14762. : m_semaphore(VK_NULL_HANDLE)
  14763. {}
  14764. VULKAN_HPP_CONSTEXPR Semaphore( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  14765. : m_semaphore(VK_NULL_HANDLE)
  14766. {}
  14767. VULKAN_HPP_TYPESAFE_EXPLICIT Semaphore( VkSemaphore semaphore ) VULKAN_HPP_NOEXCEPT
  14768. : m_semaphore( semaphore )
  14769. {}
  14770. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  14771. Semaphore & operator=(VkSemaphore semaphore) VULKAN_HPP_NOEXCEPT
  14772. {
  14773. m_semaphore = semaphore;
  14774. return *this;
  14775. }
  14776. #endif
  14777. Semaphore & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  14778. {
  14779. m_semaphore = VK_NULL_HANDLE;
  14780. return *this;
  14781. }
  14782. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  14783. auto operator<=>( Semaphore const& ) const = default;
  14784. #else
  14785. bool operator==( Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
  14786. {
  14787. return m_semaphore == rhs.m_semaphore;
  14788. }
  14789. bool operator!=(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
  14790. {
  14791. return m_semaphore != rhs.m_semaphore;
  14792. }
  14793. bool operator<(Semaphore const & rhs ) const VULKAN_HPP_NOEXCEPT
  14794. {
  14795. return m_semaphore < rhs.m_semaphore;
  14796. }
  14797. #endif
  14798. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSemaphore() const VULKAN_HPP_NOEXCEPT
  14799. {
  14800. return m_semaphore;
  14801. }
  14802. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  14803. {
  14804. return m_semaphore != VK_NULL_HANDLE;
  14805. }
  14806. bool operator!() const VULKAN_HPP_NOEXCEPT
  14807. {
  14808. return m_semaphore == VK_NULL_HANDLE;
  14809. }
  14810. private:
  14811. VkSemaphore m_semaphore;
  14812. };
  14813. static_assert( sizeof( VULKAN_HPP_NAMESPACE::Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" );
  14814. template <>
  14815. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eSemaphore>
  14816. {
  14817. using type = VULKAN_HPP_NAMESPACE::Semaphore;
  14818. };
  14819. template <>
  14820. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSemaphore>
  14821. {
  14822. using Type = VULKAN_HPP_NAMESPACE::Semaphore;
  14823. };
  14824. template <>
  14825. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSemaphore>
  14826. {
  14827. using Type = VULKAN_HPP_NAMESPACE::Semaphore;
  14828. };
  14829. template <>
  14830. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Semaphore>
  14831. {
  14832. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  14833. };
  14834. class Fence
  14835. {
  14836. public:
  14837. using CType = VkFence;
  14838. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFence;
  14839. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence;
  14840. public:
  14841. VULKAN_HPP_CONSTEXPR Fence() VULKAN_HPP_NOEXCEPT
  14842. : m_fence(VK_NULL_HANDLE)
  14843. {}
  14844. VULKAN_HPP_CONSTEXPR Fence( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  14845. : m_fence(VK_NULL_HANDLE)
  14846. {}
  14847. VULKAN_HPP_TYPESAFE_EXPLICIT Fence( VkFence fence ) VULKAN_HPP_NOEXCEPT
  14848. : m_fence( fence )
  14849. {}
  14850. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  14851. Fence & operator=(VkFence fence) VULKAN_HPP_NOEXCEPT
  14852. {
  14853. m_fence = fence;
  14854. return *this;
  14855. }
  14856. #endif
  14857. Fence & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  14858. {
  14859. m_fence = VK_NULL_HANDLE;
  14860. return *this;
  14861. }
  14862. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  14863. auto operator<=>( Fence const& ) const = default;
  14864. #else
  14865. bool operator==( Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
  14866. {
  14867. return m_fence == rhs.m_fence;
  14868. }
  14869. bool operator!=(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
  14870. {
  14871. return m_fence != rhs.m_fence;
  14872. }
  14873. bool operator<(Fence const & rhs ) const VULKAN_HPP_NOEXCEPT
  14874. {
  14875. return m_fence < rhs.m_fence;
  14876. }
  14877. #endif
  14878. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFence() const VULKAN_HPP_NOEXCEPT
  14879. {
  14880. return m_fence;
  14881. }
  14882. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  14883. {
  14884. return m_fence != VK_NULL_HANDLE;
  14885. }
  14886. bool operator!() const VULKAN_HPP_NOEXCEPT
  14887. {
  14888. return m_fence == VK_NULL_HANDLE;
  14889. }
  14890. private:
  14891. VkFence m_fence;
  14892. };
  14893. static_assert( sizeof( VULKAN_HPP_NAMESPACE::Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" );
  14894. template <>
  14895. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eFence>
  14896. {
  14897. using type = VULKAN_HPP_NAMESPACE::Fence;
  14898. };
  14899. template <>
  14900. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eFence>
  14901. {
  14902. using Type = VULKAN_HPP_NAMESPACE::Fence;
  14903. };
  14904. template <>
  14905. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFence>
  14906. {
  14907. using Type = VULKAN_HPP_NAMESPACE::Fence;
  14908. };
  14909. template <>
  14910. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Fence>
  14911. {
  14912. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  14913. };
  14914. struct AcquireNextImageInfoKHR
  14915. {
  14916. static const bool allowDuplicate = false;
  14917. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireNextImageInfoKHR;
  14918. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  14919. VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint64_t timeout_ = {}, VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::Fence fence_ = {}, uint32_t deviceMask_ = {}) VULKAN_HPP_NOEXCEPT
  14920. : swapchain( swapchain_ ), timeout( timeout_ ), semaphore( semaphore_ ), fence( fence_ ), deviceMask( deviceMask_ )
  14921. {}
  14922. VULKAN_HPP_CONSTEXPR AcquireNextImageInfoKHR( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  14923. AcquireNextImageInfoKHR( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  14924. : AcquireNextImageInfoKHR( *reinterpret_cast<AcquireNextImageInfoKHR const *>( &rhs ) )
  14925. {}
  14926. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  14927. VULKAN_HPP_CONSTEXPR_14 AcquireNextImageInfoKHR & operator=( AcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  14928. AcquireNextImageInfoKHR & operator=( VkAcquireNextImageInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  14929. {
  14930. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR const *>( &rhs );
  14931. return *this;
  14932. }
  14933. AcquireNextImageInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  14934. {
  14935. pNext = pNext_;
  14936. return *this;
  14937. }
  14938. AcquireNextImageInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
  14939. {
  14940. swapchain = swapchain_;
  14941. return *this;
  14942. }
  14943. AcquireNextImageInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
  14944. {
  14945. timeout = timeout_;
  14946. return *this;
  14947. }
  14948. AcquireNextImageInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
  14949. {
  14950. semaphore = semaphore_;
  14951. return *this;
  14952. }
  14953. AcquireNextImageInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
  14954. {
  14955. fence = fence_;
  14956. return *this;
  14957. }
  14958. AcquireNextImageInfoKHR & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
  14959. {
  14960. deviceMask = deviceMask_;
  14961. return *this;
  14962. }
  14963. operator VkAcquireNextImageInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  14964. {
  14965. return *reinterpret_cast<const VkAcquireNextImageInfoKHR*>( this );
  14966. }
  14967. operator VkAcquireNextImageInfoKHR &() VULKAN_HPP_NOEXCEPT
  14968. {
  14969. return *reinterpret_cast<VkAcquireNextImageInfoKHR*>( this );
  14970. }
  14971. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  14972. auto operator<=>( AcquireNextImageInfoKHR const& ) const = default;
  14973. #else
  14974. bool operator==( AcquireNextImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  14975. {
  14976. return ( sType == rhs.sType )
  14977. && ( pNext == rhs.pNext )
  14978. && ( swapchain == rhs.swapchain )
  14979. && ( timeout == rhs.timeout )
  14980. && ( semaphore == rhs.semaphore )
  14981. && ( fence == rhs.fence )
  14982. && ( deviceMask == rhs.deviceMask );
  14983. }
  14984. bool operator!=( AcquireNextImageInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  14985. {
  14986. return !operator==( rhs );
  14987. }
  14988. #endif
  14989. public:
  14990. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireNextImageInfoKHR;
  14991. const void* pNext = {};
  14992. VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
  14993. uint64_t timeout = {};
  14994. VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
  14995. VULKAN_HPP_NAMESPACE::Fence fence = {};
  14996. uint32_t deviceMask = {};
  14997. };
  14998. static_assert( sizeof( AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ), "struct and wrapper have different size!" );
  14999. static_assert( std::is_standard_layout<AcquireNextImageInfoKHR>::value, "struct wrapper is not a standard layout!" );
  15000. template <>
  15001. struct CppType<StructureType, StructureType::eAcquireNextImageInfoKHR>
  15002. {
  15003. using Type = AcquireNextImageInfoKHR;
  15004. };
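// --- Illustrative usage (editorial note, not part of the generated header) ---
// A minimal sketch, assuming the default `vk` namespace and hypothetical
// handles `device`, `swapchain` and `imageAvailable` created elsewhere.
// AcquireNextImageInfoKHR is the device-group aware variant of
// vkAcquireNextImageKHR (Vulkan 1.1 / VK_KHR_device_group):
//
//   auto acquireInfo = vk::AcquireNextImageInfoKHR{}
//                          .setSwapchain( swapchain )
//                          .setTimeout( UINT64_MAX )      // block until an image is free
//                          .setSemaphore( imageAvailable )
//                          .setFence( nullptr )
//                          .setDeviceMask( 1 );           // first (only) device in the group
//   // in the exception-enabled configuration the call returns a ResultValue:
//   vk::ResultValue<uint32_t> acquired = device.acquireNextImage2KHR( acquireInfo );
//   uint32_t imageIndex = acquired.value;                  // acquired.result may be eSuboptimalKHR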
  15005. struct AcquireProfilingLockInfoKHR
  15006. {
  15007. static const bool allowDuplicate = false;
  15008. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAcquireProfilingLockInfoKHR;
  15009. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15010. VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR(VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ = {}, uint64_t timeout_ = {}) VULKAN_HPP_NOEXCEPT
  15011. : flags( flags_ ), timeout( timeout_ )
  15012. {}
  15013. VULKAN_HPP_CONSTEXPR AcquireProfilingLockInfoKHR( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15014. AcquireProfilingLockInfoKHR( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  15015. : AcquireProfilingLockInfoKHR( *reinterpret_cast<AcquireProfilingLockInfoKHR const *>( &rhs ) )
  15016. {}
  15017. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15018. VULKAN_HPP_CONSTEXPR_14 AcquireProfilingLockInfoKHR & operator=( AcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15019. AcquireProfilingLockInfoKHR & operator=( VkAcquireProfilingLockInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  15020. {
  15021. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR const *>( &rhs );
  15022. return *this;
  15023. }
  15024. AcquireProfilingLockInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  15025. {
  15026. pNext = pNext_;
  15027. return *this;
  15028. }
  15029. AcquireProfilingLockInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
  15030. {
  15031. flags = flags_;
  15032. return *this;
  15033. }
  15034. AcquireProfilingLockInfoKHR & setTimeout( uint64_t timeout_ ) VULKAN_HPP_NOEXCEPT
  15035. {
  15036. timeout = timeout_;
  15037. return *this;
  15038. }
  15039. operator VkAcquireProfilingLockInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  15040. {
  15041. return *reinterpret_cast<const VkAcquireProfilingLockInfoKHR*>( this );
  15042. }
  15043. operator VkAcquireProfilingLockInfoKHR &() VULKAN_HPP_NOEXCEPT
  15044. {
  15045. return *reinterpret_cast<VkAcquireProfilingLockInfoKHR*>( this );
  15046. }
  15047. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  15048. auto operator<=>( AcquireProfilingLockInfoKHR const& ) const = default;
  15049. #else
  15050. bool operator==( AcquireProfilingLockInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  15051. {
  15052. return ( sType == rhs.sType )
  15053. && ( pNext == rhs.pNext )
  15054. && ( flags == rhs.flags )
  15055. && ( timeout == rhs.timeout );
  15056. }
  15057. bool operator!=( AcquireProfilingLockInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  15058. {
  15059. return !operator==( rhs );
  15060. }
  15061. #endif
  15062. public:
  15063. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAcquireProfilingLockInfoKHR;
  15064. const void* pNext = {};
  15065. VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR flags = {};
  15066. uint64_t timeout = {};
  15067. };
  15068. static_assert( sizeof( AcquireProfilingLockInfoKHR ) == sizeof( VkAcquireProfilingLockInfoKHR ), "struct and wrapper have different size!" );
  15069. static_assert( std::is_standard_layout<AcquireProfilingLockInfoKHR>::value, "struct wrapper is not a standard layout!" );
  15070. template <>
  15071. struct CppType<StructureType, StructureType::eAcquireProfilingLockInfoKHR>
  15072. {
  15073. using Type = AcquireProfilingLockInfoKHR;
  15074. };
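// --- Illustrative usage (editorial note, not part of the generated header) ---
// A minimal sketch, assuming a hypothetical vk::Device `device` created with
// VK_KHR_performance_query enabled. The profiling lock must be held while
// command buffers that use performance queries are recorded; the timeout is
// given in nanoseconds:
//
//   auto lockInfo = vk::AcquireProfilingLockInfoKHR{}
//                       .setTimeout( 1'000'000'000 );   // give up after one second
//   device.acquireProfilingLockKHR( lockInfo );
//   // ... record and submit performance-query command buffers ...
//   device.releaseProfilingLockKHR();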
  15075. struct AllocationCallbacks
  15076. {
  15077. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15078. VULKAN_HPP_CONSTEXPR AllocationCallbacks(void* pUserData_ = {}, PFN_vkAllocationFunction pfnAllocation_ = {}, PFN_vkReallocationFunction pfnReallocation_ = {}, PFN_vkFreeFunction pfnFree_ = {}, PFN_vkInternalAllocationNotification pfnInternalAllocation_ = {}, PFN_vkInternalFreeNotification pfnInternalFree_ = {}) VULKAN_HPP_NOEXCEPT
  15079. : pUserData( pUserData_ ), pfnAllocation( pfnAllocation_ ), pfnReallocation( pfnReallocation_ ), pfnFree( pfnFree_ ), pfnInternalAllocation( pfnInternalAllocation_ ), pfnInternalFree( pfnInternalFree_ )
  15080. {}
  15081. VULKAN_HPP_CONSTEXPR AllocationCallbacks( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15082. AllocationCallbacks( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
  15083. : AllocationCallbacks( *reinterpret_cast<AllocationCallbacks const *>( &rhs ) )
  15084. {}
  15085. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15086. VULKAN_HPP_CONSTEXPR_14 AllocationCallbacks & operator=( AllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15087. AllocationCallbacks & operator=( VkAllocationCallbacks const & rhs ) VULKAN_HPP_NOEXCEPT
  15088. {
  15089. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AllocationCallbacks const *>( &rhs );
  15090. return *this;
  15091. }
  15092. AllocationCallbacks & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
  15093. {
  15094. pUserData = pUserData_;
  15095. return *this;
  15096. }
  15097. AllocationCallbacks & setPfnAllocation( PFN_vkAllocationFunction pfnAllocation_ ) VULKAN_HPP_NOEXCEPT
  15098. {
  15099. pfnAllocation = pfnAllocation_;
  15100. return *this;
  15101. }
  15102. AllocationCallbacks & setPfnReallocation( PFN_vkReallocationFunction pfnReallocation_ ) VULKAN_HPP_NOEXCEPT
  15103. {
  15104. pfnReallocation = pfnReallocation_;
  15105. return *this;
  15106. }
  15107. AllocationCallbacks & setPfnFree( PFN_vkFreeFunction pfnFree_ ) VULKAN_HPP_NOEXCEPT
  15108. {
  15109. pfnFree = pfnFree_;
  15110. return *this;
  15111. }
  15112. AllocationCallbacks & setPfnInternalAllocation( PFN_vkInternalAllocationNotification pfnInternalAllocation_ ) VULKAN_HPP_NOEXCEPT
  15113. {
  15114. pfnInternalAllocation = pfnInternalAllocation_;
  15115. return *this;
  15116. }
  15117. AllocationCallbacks & setPfnInternalFree( PFN_vkInternalFreeNotification pfnInternalFree_ ) VULKAN_HPP_NOEXCEPT
  15118. {
  15119. pfnInternalFree = pfnInternalFree_;
  15120. return *this;
  15121. }
  15122. operator VkAllocationCallbacks const&() const VULKAN_HPP_NOEXCEPT
  15123. {
  15124. return *reinterpret_cast<const VkAllocationCallbacks*>( this );
  15125. }
  15126. operator VkAllocationCallbacks &() VULKAN_HPP_NOEXCEPT
  15127. {
  15128. return *reinterpret_cast<VkAllocationCallbacks*>( this );
  15129. }
  15130. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  15131. auto operator<=>( AllocationCallbacks const& ) const = default;
  15132. #else
  15133. bool operator==( AllocationCallbacks const& rhs ) const VULKAN_HPP_NOEXCEPT
  15134. {
  15135. return ( pUserData == rhs.pUserData )
  15136. && ( pfnAllocation == rhs.pfnAllocation )
  15137. && ( pfnReallocation == rhs.pfnReallocation )
  15138. && ( pfnFree == rhs.pfnFree )
  15139. && ( pfnInternalAllocation == rhs.pfnInternalAllocation )
  15140. && ( pfnInternalFree == rhs.pfnInternalFree );
  15141. }
  15142. bool operator!=( AllocationCallbacks const& rhs ) const VULKAN_HPP_NOEXCEPT
  15143. {
  15144. return !operator==( rhs );
  15145. }
  15146. #endif
  15147. public:
  15148. void* pUserData = {};
  15149. PFN_vkAllocationFunction pfnAllocation = {};
  15150. PFN_vkReallocationFunction pfnReallocation = {};
  15151. PFN_vkFreeFunction pfnFree = {};
  15152. PFN_vkInternalAllocationNotification pfnInternalAllocation = {};
  15153. PFN_vkInternalFreeNotification pfnInternalFree = {};
  15154. };
  15155. static_assert( sizeof( AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" );
  15156. static_assert( std::is_standard_layout<AllocationCallbacks>::value, "struct wrapper is not a standard layout!" );
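// --- Illustrative usage (editorial note, not part of the generated header) ---
// A minimal sketch of routing host allocations through a custom allocator.
// The callback signatures (PFN_vkAllocationFunction / PFN_vkFreeFunction) come
// from the C headers; `MyTracker`, `myAlloc` and `myFree` are hypothetical. A
// complete callback set must also provide pfnReallocation; it is omitted here
// only for brevity:
//
//   static VKAPI_ATTR void * VKAPI_CALL myAlloc( void * userData, size_t size,
//                                                size_t alignment, VkSystemAllocationScope )
//   {
//     return static_cast<MyTracker *>( userData )->allocate( size, alignment );
//   }
//   static VKAPI_ATTR void VKAPI_CALL myFree( void * userData, void * memory )
//   {
//     static_cast<MyTracker *>( userData )->deallocate( memory );
//   }
//
//   MyTracker tracker;
//   auto callbacks = vk::AllocationCallbacks{}
//                        .setPUserData( &tracker )
//                        .setPfnAllocation( &myAlloc )
//                        .setPfnFree( &myFree );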
  15157. struct ComponentMapping
  15158. {
  15159. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15160. VULKAN_HPP_CONSTEXPR ComponentMapping(VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity, VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity) VULKAN_HPP_NOEXCEPT
  15161. : r( r_ ), g( g_ ), b( b_ ), a( a_ )
  15162. {}
  15163. VULKAN_HPP_CONSTEXPR ComponentMapping( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15164. ComponentMapping( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
  15165. : ComponentMapping( *reinterpret_cast<ComponentMapping const *>( &rhs ) )
  15166. {}
  15167. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15168. VULKAN_HPP_CONSTEXPR_14 ComponentMapping & operator=( ComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15169. ComponentMapping & operator=( VkComponentMapping const & rhs ) VULKAN_HPP_NOEXCEPT
  15170. {
  15171. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComponentMapping const *>( &rhs );
  15172. return *this;
  15173. }
  15174. ComponentMapping & setR( VULKAN_HPP_NAMESPACE::ComponentSwizzle r_ ) VULKAN_HPP_NOEXCEPT
  15175. {
  15176. r = r_;
  15177. return *this;
  15178. }
  15179. ComponentMapping & setG( VULKAN_HPP_NAMESPACE::ComponentSwizzle g_ ) VULKAN_HPP_NOEXCEPT
  15180. {
  15181. g = g_;
  15182. return *this;
  15183. }
  15184. ComponentMapping & setB( VULKAN_HPP_NAMESPACE::ComponentSwizzle b_ ) VULKAN_HPP_NOEXCEPT
  15185. {
  15186. b = b_;
  15187. return *this;
  15188. }
  15189. ComponentMapping & setA( VULKAN_HPP_NAMESPACE::ComponentSwizzle a_ ) VULKAN_HPP_NOEXCEPT
  15190. {
  15191. a = a_;
  15192. return *this;
  15193. }
  15194. operator VkComponentMapping const&() const VULKAN_HPP_NOEXCEPT
  15195. {
  15196. return *reinterpret_cast<const VkComponentMapping*>( this );
  15197. }
  15198. operator VkComponentMapping &() VULKAN_HPP_NOEXCEPT
  15199. {
  15200. return *reinterpret_cast<VkComponentMapping*>( this );
  15201. }
  15202. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  15203. auto operator<=>( ComponentMapping const& ) const = default;
  15204. #else
  15205. bool operator==( ComponentMapping const& rhs ) const VULKAN_HPP_NOEXCEPT
  15206. {
  15207. return ( r == rhs.r )
  15208. && ( g == rhs.g )
  15209. && ( b == rhs.b )
  15210. && ( a == rhs.a );
  15211. }
  15212. bool operator!=( ComponentMapping const& rhs ) const VULKAN_HPP_NOEXCEPT
  15213. {
  15214. return !operator==( rhs );
  15215. }
  15216. #endif
  15217. public:
  15218. VULKAN_HPP_NAMESPACE::ComponentSwizzle r = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
  15219. VULKAN_HPP_NAMESPACE::ComponentSwizzle g = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
  15220. VULKAN_HPP_NAMESPACE::ComponentSwizzle b = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
  15221. VULKAN_HPP_NAMESPACE::ComponentSwizzle a = VULKAN_HPP_NAMESPACE::ComponentSwizzle::eIdentity;
  15222. };
  15223. static_assert( sizeof( ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" );
  15224. static_assert( std::is_standard_layout<ComponentMapping>::value, "struct wrapper is not a standard layout!" );
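// --- Illustrative usage (editorial note, not part of the generated header) ---
// A minimal sketch: all four swizzles default to eIdentity, so a default-
// constructed ComponentMapping is the common case. To view a single-channel
// (R8) image as opaque grey-scale RGBA, replicate the red channel and force
// alpha to one:
//
//   vk::ComponentMapping greyscale{ vk::ComponentSwizzle::eR,
//                                   vk::ComponentSwizzle::eR,
//                                   vk::ComponentSwizzle::eR,
//                                   vk::ComponentSwizzle::eOne };
//   // typically assigned to ImageViewCreateInfo::components (hypothetical usage)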
  15225. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  15226. struct AndroidHardwareBufferFormatPropertiesANDROID
  15227. {
  15228. static const bool allowDuplicate = false;
  15229. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
  15230. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15231. VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint64_t externalFormat_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures_ = {}, VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents_ = {}, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven) VULKAN_HPP_NOEXCEPT
  15232. : format( format_ ), externalFormat( externalFormat_ ), formatFeatures( formatFeatures_ ), samplerYcbcrConversionComponents( samplerYcbcrConversionComponents_ ), suggestedYcbcrModel( suggestedYcbcrModel_ ), suggestedYcbcrRange( suggestedYcbcrRange_ ), suggestedXChromaOffset( suggestedXChromaOffset_ ), suggestedYChromaOffset( suggestedYChromaOffset_ )
  15233. {}
  15234. VULKAN_HPP_CONSTEXPR AndroidHardwareBufferFormatPropertiesANDROID( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15235. AndroidHardwareBufferFormatPropertiesANDROID( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
  15236. : AndroidHardwareBufferFormatPropertiesANDROID( *reinterpret_cast<AndroidHardwareBufferFormatPropertiesANDROID const *>( &rhs ) )
  15237. {}
  15238. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15239. VULKAN_HPP_CONSTEXPR_14 AndroidHardwareBufferFormatPropertiesANDROID & operator=( AndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15240. AndroidHardwareBufferFormatPropertiesANDROID & operator=( VkAndroidHardwareBufferFormatPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
  15241. {
  15242. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID const *>( &rhs );
  15243. return *this;
  15244. }
  15245. operator VkAndroidHardwareBufferFormatPropertiesANDROID const&() const VULKAN_HPP_NOEXCEPT
  15246. {
  15247. return *reinterpret_cast<const VkAndroidHardwareBufferFormatPropertiesANDROID*>( this );
  15248. }
  15249. operator VkAndroidHardwareBufferFormatPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
  15250. {
  15251. return *reinterpret_cast<VkAndroidHardwareBufferFormatPropertiesANDROID*>( this );
  15252. }
  15253. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  15254. auto operator<=>( AndroidHardwareBufferFormatPropertiesANDROID const& ) const = default;
  15255. #else
  15256. bool operator==( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
  15257. {
  15258. return ( sType == rhs.sType )
  15259. && ( pNext == rhs.pNext )
  15260. && ( format == rhs.format )
  15261. && ( externalFormat == rhs.externalFormat )
  15262. && ( formatFeatures == rhs.formatFeatures )
  15263. && ( samplerYcbcrConversionComponents == rhs.samplerYcbcrConversionComponents )
  15264. && ( suggestedYcbcrModel == rhs.suggestedYcbcrModel )
  15265. && ( suggestedYcbcrRange == rhs.suggestedYcbcrRange )
  15266. && ( suggestedXChromaOffset == rhs.suggestedXChromaOffset )
  15267. && ( suggestedYChromaOffset == rhs.suggestedYChromaOffset );
  15268. }
  15269. bool operator!=( AndroidHardwareBufferFormatPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
  15270. {
  15271. return !operator==( rhs );
  15272. }
  15273. #endif
  15274. public:
  15275. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferFormatPropertiesANDROID;
  15276. void* pNext = {};
  15277. VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  15278. uint64_t externalFormat = {};
  15279. VULKAN_HPP_NAMESPACE::FormatFeatureFlags formatFeatures = {};
  15280. VULKAN_HPP_NAMESPACE::ComponentMapping samplerYcbcrConversionComponents = {};
  15281. VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion suggestedYcbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
  15282. VULKAN_HPP_NAMESPACE::SamplerYcbcrRange suggestedYcbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
  15283. VULKAN_HPP_NAMESPACE::ChromaLocation suggestedXChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
  15284. VULKAN_HPP_NAMESPACE::ChromaLocation suggestedYChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
  15285. };
  15286. static_assert( sizeof( AndroidHardwareBufferFormatPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ), "struct and wrapper have different size!" );
  15287. static_assert( std::is_standard_layout<AndroidHardwareBufferFormatPropertiesANDROID>::value, "struct wrapper is not a standard layout!" );
  15288. template <>
  15289. struct CppType<StructureType, StructureType::eAndroidHardwareBufferFormatPropertiesANDROID>
  15290. {
  15291. using Type = AndroidHardwareBufferFormatPropertiesANDROID;
  15292. };
  15293. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  15294. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  15295. struct AndroidHardwareBufferPropertiesANDROID
  15296. {
  15297. static const bool allowDuplicate = false;
  15298. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
  15299. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15300. VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID(VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
  15301. : allocationSize( allocationSize_ ), memoryTypeBits( memoryTypeBits_ )
  15302. {}
  15303. VULKAN_HPP_CONSTEXPR AndroidHardwareBufferPropertiesANDROID( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15304. AndroidHardwareBufferPropertiesANDROID( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
  15305. : AndroidHardwareBufferPropertiesANDROID( *reinterpret_cast<AndroidHardwareBufferPropertiesANDROID const *>( &rhs ) )
  15306. {}
  15307. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15308. VULKAN_HPP_CONSTEXPR_14 AndroidHardwareBufferPropertiesANDROID & operator=( AndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15309. AndroidHardwareBufferPropertiesANDROID & operator=( VkAndroidHardwareBufferPropertiesANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
  15310. {
  15311. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID const *>( &rhs );
  15312. return *this;
  15313. }
  15314. operator VkAndroidHardwareBufferPropertiesANDROID const&() const VULKAN_HPP_NOEXCEPT
  15315. {
  15316. return *reinterpret_cast<const VkAndroidHardwareBufferPropertiesANDROID*>( this );
  15317. }
  15318. operator VkAndroidHardwareBufferPropertiesANDROID &() VULKAN_HPP_NOEXCEPT
  15319. {
  15320. return *reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID*>( this );
  15321. }
  15322. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  15323. auto operator<=>( AndroidHardwareBufferPropertiesANDROID const& ) const = default;
  15324. #else
  15325. bool operator==( AndroidHardwareBufferPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
  15326. {
  15327. return ( sType == rhs.sType )
  15328. && ( pNext == rhs.pNext )
  15329. && ( allocationSize == rhs.allocationSize )
  15330. && ( memoryTypeBits == rhs.memoryTypeBits );
  15331. }
  15332. bool operator!=( AndroidHardwareBufferPropertiesANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
  15333. {
  15334. return !operator==( rhs );
  15335. }
  15336. #endif
  15337. public:
  15338. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferPropertiesANDROID;
  15339. void* pNext = {};
  15340. VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
  15341. uint32_t memoryTypeBits = {};
  15342. };
  15343. static_assert( sizeof( AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ), "struct and wrapper have different size!" );
  15344. static_assert( std::is_standard_layout<AndroidHardwareBufferPropertiesANDROID>::value, "struct wrapper is not a standard layout!" );
  15345. template <>
  15346. struct CppType<StructureType, StructureType::eAndroidHardwareBufferPropertiesANDROID>
  15347. {
  15348. using Type = AndroidHardwareBufferPropertiesANDROID;
  15349. };
  15350. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
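// --- Illustrative usage (editorial note, not part of the generated header) ---
// A minimal sketch (Android only), assuming a hypothetical VkDevice `device`
// and AHardwareBuffer pointer `ahb`. Both structs above are output-only;
// AndroidHardwareBufferFormatPropertiesANDROID is filled in by chaining it
// into the pNext of AndroidHardwareBufferPropertiesANDROID before the query:
//
//   vk::AndroidHardwareBufferFormatPropertiesANDROID formatProps;
//   vk::AndroidHardwareBufferPropertiesANDROID props;
//   props.pNext = &formatProps;
//   VkAndroidHardwareBufferPropertiesANDROID & cProps = props;   // implicit conversion to the C type
//   vkGetAndroidHardwareBufferPropertiesANDROID( device, ahb, &cProps );
//   // props.allocationSize / props.memoryTypeBits and formatProps.format are now populated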
  15351. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  15352. struct AndroidHardwareBufferUsageANDROID
  15353. {
  15354. static const bool allowDuplicate = false;
  15355. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidHardwareBufferUsageANDROID;
  15356. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15357. VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID(uint64_t androidHardwareBufferUsage_ = {}) VULKAN_HPP_NOEXCEPT
  15358. : androidHardwareBufferUsage( androidHardwareBufferUsage_ )
  15359. {}
  15360. VULKAN_HPP_CONSTEXPR AndroidHardwareBufferUsageANDROID( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15361. AndroidHardwareBufferUsageANDROID( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
  15362. : AndroidHardwareBufferUsageANDROID( *reinterpret_cast<AndroidHardwareBufferUsageANDROID const *>( &rhs ) )
  15363. {}
  15364. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15365. VULKAN_HPP_CONSTEXPR_14 AndroidHardwareBufferUsageANDROID & operator=( AndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15366. AndroidHardwareBufferUsageANDROID & operator=( VkAndroidHardwareBufferUsageANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
  15367. {
  15368. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID const *>( &rhs );
  15369. return *this;
  15370. }
  15371. operator VkAndroidHardwareBufferUsageANDROID const&() const VULKAN_HPP_NOEXCEPT
  15372. {
  15373. return *reinterpret_cast<const VkAndroidHardwareBufferUsageANDROID*>( this );
  15374. }
  15375. operator VkAndroidHardwareBufferUsageANDROID &() VULKAN_HPP_NOEXCEPT
  15376. {
  15377. return *reinterpret_cast<VkAndroidHardwareBufferUsageANDROID*>( this );
  15378. }
  15379. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  15380. auto operator<=>( AndroidHardwareBufferUsageANDROID const& ) const = default;
  15381. #else
  15382. bool operator==( AndroidHardwareBufferUsageANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
  15383. {
  15384. return ( sType == rhs.sType )
  15385. && ( pNext == rhs.pNext )
  15386. && ( androidHardwareBufferUsage == rhs.androidHardwareBufferUsage );
  15387. }
  15388. bool operator!=( AndroidHardwareBufferUsageANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
  15389. {
  15390. return !operator==( rhs );
  15391. }
  15392. #endif
  15393. public:
  15394. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidHardwareBufferUsageANDROID;
  15395. void* pNext = {};
  15396. uint64_t androidHardwareBufferUsage = {};
  15397. };
  15398. static_assert( sizeof( AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ), "struct and wrapper have different size!" );
  15399. static_assert( std::is_standard_layout<AndroidHardwareBufferUsageANDROID>::value, "struct wrapper is not a standard layout!" );
  15400. template <>
  15401. struct CppType<StructureType, StructureType::eAndroidHardwareBufferUsageANDROID>
  15402. {
  15403. using Type = AndroidHardwareBufferUsageANDROID;
  15404. };
  15405. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  15406. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  15407. struct AndroidSurfaceCreateInfoKHR
  15408. {
  15409. static const bool allowDuplicate = false;
  15410. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAndroidSurfaceCreateInfoKHR;
  15411. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15412. VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ = {}, struct ANativeWindow* window_ = {}) VULKAN_HPP_NOEXCEPT
  15413. : flags( flags_ ), window( window_ )
  15414. {}
  15415. VULKAN_HPP_CONSTEXPR AndroidSurfaceCreateInfoKHR( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15416. AndroidSurfaceCreateInfoKHR( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  15417. : AndroidSurfaceCreateInfoKHR( *reinterpret_cast<AndroidSurfaceCreateInfoKHR const *>( &rhs ) )
  15418. {}
  15419. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15420. VULKAN_HPP_CONSTEXPR_14 AndroidSurfaceCreateInfoKHR & operator=( AndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15421. AndroidSurfaceCreateInfoKHR & operator=( VkAndroidSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  15422. {
  15423. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR const *>( &rhs );
  15424. return *this;
  15425. }
  15426. AndroidSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  15427. {
  15428. pNext = pNext_;
  15429. return *this;
  15430. }
  15431. AndroidSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
  15432. {
  15433. flags = flags_;
  15434. return *this;
  15435. }
  15436. AndroidSurfaceCreateInfoKHR & setWindow( struct ANativeWindow* window_ ) VULKAN_HPP_NOEXCEPT
  15437. {
  15438. window = window_;
  15439. return *this;
  15440. }
  15441. operator VkAndroidSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  15442. {
  15443. return *reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR*>( this );
  15444. }
  15445. operator VkAndroidSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
  15446. {
  15447. return *reinterpret_cast<VkAndroidSurfaceCreateInfoKHR*>( this );
  15448. }
  15449. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  15450. auto operator<=>( AndroidSurfaceCreateInfoKHR const& ) const = default;
  15451. #else
  15452. bool operator==( AndroidSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  15453. {
  15454. return ( sType == rhs.sType )
  15455. && ( pNext == rhs.pNext )
  15456. && ( flags == rhs.flags )
  15457. && ( window == rhs.window );
  15458. }
  15459. bool operator!=( AndroidSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  15460. {
  15461. return !operator==( rhs );
  15462. }
  15463. #endif
  15464. public:
  15465. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAndroidSurfaceCreateInfoKHR;
  15466. const void* pNext = {};
  15467. VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR flags = {};
  15468. struct ANativeWindow* window = {};
  15469. };
  15470. static_assert( sizeof( AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
  15471. static_assert( std::is_standard_layout<AndroidSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  15472. template <>
  15473. struct CppType<StructureType, StructureType::eAndroidSurfaceCreateInfoKHR>
  15474. {
  15475. using Type = AndroidSurfaceCreateInfoKHR;
  15476. };
  15477. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
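// --- Illustrative usage (editorial note, not part of the generated header) ---
// A minimal sketch (Android only), assuming a hypothetical vk::Instance
// `instance` created with VK_KHR_android_surface enabled and an ANativeWindow
// pointer `window` obtained from the app's activity:
//
//   auto surfaceInfo = vk::AndroidSurfaceCreateInfoKHR{}.setWindow( window );
//   vk::SurfaceKHR surface = instance.createAndroidSurfaceKHR( surfaceInfo );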
  15478. struct ApplicationInfo
  15479. {
  15480. static const bool allowDuplicate = false;
  15481. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eApplicationInfo;
  15482. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15483. VULKAN_HPP_CONSTEXPR ApplicationInfo(const char* pApplicationName_ = {}, uint32_t applicationVersion_ = {}, const char* pEngineName_ = {}, uint32_t engineVersion_ = {}, uint32_t apiVersion_ = {}) VULKAN_HPP_NOEXCEPT
  15484. : pApplicationName( pApplicationName_ ), applicationVersion( applicationVersion_ ), pEngineName( pEngineName_ ), engineVersion( engineVersion_ ), apiVersion( apiVersion_ )
  15485. {}
  15486. VULKAN_HPP_CONSTEXPR ApplicationInfo( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15487. ApplicationInfo( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  15488. : ApplicationInfo( *reinterpret_cast<ApplicationInfo const *>( &rhs ) )
  15489. {}
  15490. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15491. VULKAN_HPP_CONSTEXPR_14 ApplicationInfo & operator=( ApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15492. ApplicationInfo & operator=( VkApplicationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  15493. {
  15494. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ApplicationInfo const *>( &rhs );
  15495. return *this;
  15496. }
  15497. ApplicationInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  15498. {
  15499. pNext = pNext_;
  15500. return *this;
  15501. }
  15502. ApplicationInfo & setPApplicationName( const char* pApplicationName_ ) VULKAN_HPP_NOEXCEPT
  15503. {
  15504. pApplicationName = pApplicationName_;
  15505. return *this;
  15506. }
  15507. ApplicationInfo & setApplicationVersion( uint32_t applicationVersion_ ) VULKAN_HPP_NOEXCEPT
  15508. {
  15509. applicationVersion = applicationVersion_;
  15510. return *this;
  15511. }
  15512. ApplicationInfo & setPEngineName( const char* pEngineName_ ) VULKAN_HPP_NOEXCEPT
  15513. {
  15514. pEngineName = pEngineName_;
  15515. return *this;
  15516. }
  15517. ApplicationInfo & setEngineVersion( uint32_t engineVersion_ ) VULKAN_HPP_NOEXCEPT
  15518. {
  15519. engineVersion = engineVersion_;
  15520. return *this;
  15521. }
  15522. ApplicationInfo & setApiVersion( uint32_t apiVersion_ ) VULKAN_HPP_NOEXCEPT
  15523. {
  15524. apiVersion = apiVersion_;
  15525. return *this;
  15526. }
  15527. operator VkApplicationInfo const&() const VULKAN_HPP_NOEXCEPT
  15528. {
  15529. return *reinterpret_cast<const VkApplicationInfo*>( this );
  15530. }
  15531. operator VkApplicationInfo &() VULKAN_HPP_NOEXCEPT
  15532. {
  15533. return *reinterpret_cast<VkApplicationInfo*>( this );
  15534. }
  15535. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  15536. auto operator<=>( ApplicationInfo const& ) const = default;
  15537. #else
  15538. bool operator==( ApplicationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  15539. {
  15540. return ( sType == rhs.sType )
  15541. && ( pNext == rhs.pNext )
  15542. && ( pApplicationName == rhs.pApplicationName )
  15543. && ( applicationVersion == rhs.applicationVersion )
  15544. && ( pEngineName == rhs.pEngineName )
  15545. && ( engineVersion == rhs.engineVersion )
  15546. && ( apiVersion == rhs.apiVersion );
  15547. }
  15548. bool operator!=( ApplicationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  15549. {
  15550. return !operator==( rhs );
  15551. }
  15552. #endif
  15553. public:
  15554. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eApplicationInfo;
  15555. const void* pNext = {};
  15556. const char* pApplicationName = {};
  15557. uint32_t applicationVersion = {};
  15558. const char* pEngineName = {};
  15559. uint32_t engineVersion = {};
  15560. uint32_t apiVersion = {};
  15561. };
  15562. static_assert( sizeof( ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" );
  15563. static_assert( std::is_standard_layout<ApplicationInfo>::value, "struct wrapper is not a standard layout!" );
  15564. template <>
  15565. struct CppType<StructureType, StructureType::eApplicationInfo>
  15566. {
  15567. using Type = ApplicationInfo;
  15568. };
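// Illustrative usage sketch (editor's addition, not part of the generated header).
// The setters above all return *this, so an ApplicationInfo can be filled in one fluent
// expression and then handed to instance creation. The application/engine names, the
// version values, and the `vk` namespace alias are assumptions for the example only:
//
//   vk::ApplicationInfo appInfo = vk::ApplicationInfo()
//                                   .setPApplicationName( "MyApp" )
//                                   .setApplicationVersion( VK_MAKE_VERSION( 1, 0, 0 ) )
//                                   .setPEngineName( "MyEngine" )
//                                   .setEngineVersion( VK_MAKE_VERSION( 1, 0, 0 ) )
//                                   .setApiVersion( VK_MAKE_VERSION( 1, 2, 0 ) );
//
//   // The wrapper converts to the C struct, so it also works with C entry points:
//   VkApplicationInfo const & cInfo = appInfo;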
  15569. struct AttachmentDescription
  15570. {
  15571. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15572. VULKAN_HPP_CONSTEXPR AttachmentDescription(VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
  15573. : flags( flags_ ), format( format_ ), samples( samples_ ), loadOp( loadOp_ ), storeOp( storeOp_ ), stencilLoadOp( stencilLoadOp_ ), stencilStoreOp( stencilStoreOp_ ), initialLayout( initialLayout_ ), finalLayout( finalLayout_ )
  15574. {}
  15575. VULKAN_HPP_CONSTEXPR AttachmentDescription( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15576. AttachmentDescription( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
  15577. : AttachmentDescription( *reinterpret_cast<AttachmentDescription const *>( &rhs ) )
  15578. {}
  15579. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15580. VULKAN_HPP_CONSTEXPR_14 AttachmentDescription & operator=( AttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15581. AttachmentDescription & operator=( VkAttachmentDescription const & rhs ) VULKAN_HPP_NOEXCEPT
  15582. {
  15583. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription const *>( &rhs );
  15584. return *this;
  15585. }
  15586. AttachmentDescription & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
  15587. {
  15588. flags = flags_;
  15589. return *this;
  15590. }
  15591. AttachmentDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
  15592. {
  15593. format = format_;
  15594. return *this;
  15595. }
  15596. AttachmentDescription & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
  15597. {
  15598. samples = samples_;
  15599. return *this;
  15600. }
  15601. AttachmentDescription & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
  15602. {
  15603. loadOp = loadOp_;
  15604. return *this;
  15605. }
  15606. AttachmentDescription & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
  15607. {
  15608. storeOp = storeOp_;
  15609. return *this;
  15610. }
  15611. AttachmentDescription & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
  15612. {
  15613. stencilLoadOp = stencilLoadOp_;
  15614. return *this;
  15615. }
  15616. AttachmentDescription & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
  15617. {
  15618. stencilStoreOp = stencilStoreOp_;
  15619. return *this;
  15620. }
  15621. AttachmentDescription & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
  15622. {
  15623. initialLayout = initialLayout_;
  15624. return *this;
  15625. }
  15626. AttachmentDescription & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
  15627. {
  15628. finalLayout = finalLayout_;
  15629. return *this;
  15630. }
  15631. operator VkAttachmentDescription const&() const VULKAN_HPP_NOEXCEPT
  15632. {
  15633. return *reinterpret_cast<const VkAttachmentDescription*>( this );
  15634. }
  15635. operator VkAttachmentDescription &() VULKAN_HPP_NOEXCEPT
  15636. {
  15637. return *reinterpret_cast<VkAttachmentDescription*>( this );
  15638. }
  15639. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  15640. auto operator<=>( AttachmentDescription const& ) const = default;
  15641. #else
  15642. bool operator==( AttachmentDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
  15643. {
  15644. return ( flags == rhs.flags )
  15645. && ( format == rhs.format )
  15646. && ( samples == rhs.samples )
  15647. && ( loadOp == rhs.loadOp )
  15648. && ( storeOp == rhs.storeOp )
  15649. && ( stencilLoadOp == rhs.stencilLoadOp )
  15650. && ( stencilStoreOp == rhs.stencilStoreOp )
  15651. && ( initialLayout == rhs.initialLayout )
  15652. && ( finalLayout == rhs.finalLayout );
  15653. }
  15654. bool operator!=( AttachmentDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
  15655. {
  15656. return !operator==( rhs );
  15657. }
  15658. #endif
  15659. public:
  15660. VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {};
  15661. VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  15662. VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
  15663. VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
  15664. VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
  15665. VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
  15666. VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
  15667. VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  15668. VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  15669. };
  15670. static_assert( sizeof( AttachmentDescription ) == sizeof( VkAttachmentDescription ), "struct and wrapper have different size!" );
  15671. static_assert( std::is_standard_layout<AttachmentDescription>::value, "struct wrapper is not a standard layout!" );
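// Illustrative usage sketch (editor's addition, not part of the generated header).
// A typical single-sampled color attachment that is cleared on load, stored on store,
// and presented afterwards; the chosen format and layouts are assumptions for the
// example, and `vk` is assumed to alias VULKAN_HPP_NAMESPACE:
//
//   vk::AttachmentDescription colorAttachment = vk::AttachmentDescription()
//                                                 .setFormat( vk::Format::eB8G8R8A8Unorm )
//                                                 .setSamples( vk::SampleCountFlagBits::e1 )
//                                                 .setLoadOp( vk::AttachmentLoadOp::eClear )
//                                                 .setStoreOp( vk::AttachmentStoreOp::eStore )
//                                                 .setStencilLoadOp( vk::AttachmentLoadOp::eDontCare )
//                                                 .setStencilStoreOp( vk::AttachmentStoreOp::eDontCare )
//                                                 .setInitialLayout( vk::ImageLayout::eUndefined )
//                                                 .setFinalLayout( vk::ImageLayout::ePresentSrcKHR );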
  15672. struct AttachmentDescription2
  15673. {
  15674. static const bool allowDuplicate = false;
  15675. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescription2;
  15676. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15677. VULKAN_HPP_CONSTEXPR AttachmentDescription2(VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad, VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
  15678. : flags( flags_ ), format( format_ ), samples( samples_ ), loadOp( loadOp_ ), storeOp( storeOp_ ), stencilLoadOp( stencilLoadOp_ ), stencilStoreOp( stencilStoreOp_ ), initialLayout( initialLayout_ ), finalLayout( finalLayout_ )
  15679. {}
  15680. VULKAN_HPP_CONSTEXPR AttachmentDescription2( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15681. AttachmentDescription2( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
  15682. : AttachmentDescription2( *reinterpret_cast<AttachmentDescription2 const *>( &rhs ) )
  15683. {}
  15684. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15685. VULKAN_HPP_CONSTEXPR_14 AttachmentDescription2 & operator=( AttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15686. AttachmentDescription2 & operator=( VkAttachmentDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
  15687. {
  15688. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescription2 const *>( &rhs );
  15689. return *this;
  15690. }
  15691. AttachmentDescription2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  15692. {
  15693. pNext = pNext_;
  15694. return *this;
  15695. }
  15696. AttachmentDescription2 & setFlags( VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
  15697. {
  15698. flags = flags_;
  15699. return *this;
  15700. }
  15701. AttachmentDescription2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
  15702. {
  15703. format = format_;
  15704. return *this;
  15705. }
  15706. AttachmentDescription2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
  15707. {
  15708. samples = samples_;
  15709. return *this;
  15710. }
  15711. AttachmentDescription2 & setLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp_ ) VULKAN_HPP_NOEXCEPT
  15712. {
  15713. loadOp = loadOp_;
  15714. return *this;
  15715. }
  15716. AttachmentDescription2 & setStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp_ ) VULKAN_HPP_NOEXCEPT
  15717. {
  15718. storeOp = storeOp_;
  15719. return *this;
  15720. }
  15721. AttachmentDescription2 & setStencilLoadOp( VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp_ ) VULKAN_HPP_NOEXCEPT
  15722. {
  15723. stencilLoadOp = stencilLoadOp_;
  15724. return *this;
  15725. }
  15726. AttachmentDescription2 & setStencilStoreOp( VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp_ ) VULKAN_HPP_NOEXCEPT
  15727. {
  15728. stencilStoreOp = stencilStoreOp_;
  15729. return *this;
  15730. }
  15731. AttachmentDescription2 & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
  15732. {
  15733. initialLayout = initialLayout_;
  15734. return *this;
  15735. }
  15736. AttachmentDescription2 & setFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout finalLayout_ ) VULKAN_HPP_NOEXCEPT
  15737. {
  15738. finalLayout = finalLayout_;
  15739. return *this;
  15740. }
  15741. operator VkAttachmentDescription2 const&() const VULKAN_HPP_NOEXCEPT
  15742. {
  15743. return *reinterpret_cast<const VkAttachmentDescription2*>( this );
  15744. }
  15745. operator VkAttachmentDescription2 &() VULKAN_HPP_NOEXCEPT
  15746. {
  15747. return *reinterpret_cast<VkAttachmentDescription2*>( this );
  15748. }
  15749. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  15750. auto operator<=>( AttachmentDescription2 const& ) const = default;
  15751. #else
  15752. bool operator==( AttachmentDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  15753. {
  15754. return ( sType == rhs.sType )
  15755. && ( pNext == rhs.pNext )
  15756. && ( flags == rhs.flags )
  15757. && ( format == rhs.format )
  15758. && ( samples == rhs.samples )
  15759. && ( loadOp == rhs.loadOp )
  15760. && ( storeOp == rhs.storeOp )
  15761. && ( stencilLoadOp == rhs.stencilLoadOp )
  15762. && ( stencilStoreOp == rhs.stencilStoreOp )
  15763. && ( initialLayout == rhs.initialLayout )
  15764. && ( finalLayout == rhs.finalLayout );
  15765. }
  15766. bool operator!=( AttachmentDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  15767. {
  15768. return !operator==( rhs );
  15769. }
  15770. #endif
  15771. public:
  15772. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescription2;
  15773. const void* pNext = {};
  15774. VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags flags = {};
  15775. VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  15776. VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
  15777. VULKAN_HPP_NAMESPACE::AttachmentLoadOp loadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
  15778. VULKAN_HPP_NAMESPACE::AttachmentStoreOp storeOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
  15779. VULKAN_HPP_NAMESPACE::AttachmentLoadOp stencilLoadOp = VULKAN_HPP_NAMESPACE::AttachmentLoadOp::eLoad;
  15780. VULKAN_HPP_NAMESPACE::AttachmentStoreOp stencilStoreOp = VULKAN_HPP_NAMESPACE::AttachmentStoreOp::eStore;
  15781. VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  15782. VULKAN_HPP_NAMESPACE::ImageLayout finalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  15783. };
  15784. static_assert( sizeof( AttachmentDescription2 ) == sizeof( VkAttachmentDescription2 ), "struct and wrapper have different size!" );
  15785. static_assert( std::is_standard_layout<AttachmentDescription2>::value, "struct wrapper is not a standard layout!" );
  15786. template <>
  15787. struct CppType<StructureType, StructureType::eAttachmentDescription2>
  15788. {
  15789. using Type = AttachmentDescription2;
  15790. };
  15791. using AttachmentDescription2KHR = AttachmentDescription2;
  15792. struct AttachmentDescriptionStencilLayout
  15793. {
  15794. static const bool allowDuplicate = false;
  15795. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentDescriptionStencilLayout;
  15796. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15797. VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout(VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
  15798. : stencilInitialLayout( stencilInitialLayout_ ), stencilFinalLayout( stencilFinalLayout_ )
  15799. {}
  15800. VULKAN_HPP_CONSTEXPR AttachmentDescriptionStencilLayout( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15801. AttachmentDescriptionStencilLayout( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
  15802. : AttachmentDescriptionStencilLayout( *reinterpret_cast<AttachmentDescriptionStencilLayout const *>( &rhs ) )
  15803. {}
  15804. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15805. VULKAN_HPP_CONSTEXPR_14 AttachmentDescriptionStencilLayout & operator=( AttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15806. AttachmentDescriptionStencilLayout & operator=( VkAttachmentDescriptionStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
  15807. {
  15808. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout const *>( &rhs );
  15809. return *this;
  15810. }
  15811. AttachmentDescriptionStencilLayout & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  15812. {
  15813. pNext = pNext_;
  15814. return *this;
  15815. }
  15816. AttachmentDescriptionStencilLayout & setStencilInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout_ ) VULKAN_HPP_NOEXCEPT
  15817. {
  15818. stencilInitialLayout = stencilInitialLayout_;
  15819. return *this;
  15820. }
  15821. AttachmentDescriptionStencilLayout & setStencilFinalLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout_ ) VULKAN_HPP_NOEXCEPT
  15822. {
  15823. stencilFinalLayout = stencilFinalLayout_;
  15824. return *this;
  15825. }
  15826. operator VkAttachmentDescriptionStencilLayout const&() const VULKAN_HPP_NOEXCEPT
  15827. {
  15828. return *reinterpret_cast<const VkAttachmentDescriptionStencilLayout*>( this );
  15829. }
  15830. operator VkAttachmentDescriptionStencilLayout &() VULKAN_HPP_NOEXCEPT
  15831. {
  15832. return *reinterpret_cast<VkAttachmentDescriptionStencilLayout*>( this );
  15833. }
  15834. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  15835. auto operator<=>( AttachmentDescriptionStencilLayout const& ) const = default;
  15836. #else
  15837. bool operator==( AttachmentDescriptionStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
  15838. {
  15839. return ( sType == rhs.sType )
  15840. && ( pNext == rhs.pNext )
  15841. && ( stencilInitialLayout == rhs.stencilInitialLayout )
  15842. && ( stencilFinalLayout == rhs.stencilFinalLayout );
  15843. }
  15844. bool operator!=( AttachmentDescriptionStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
  15845. {
  15846. return !operator==( rhs );
  15847. }
  15848. #endif
  15849. public:
  15850. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentDescriptionStencilLayout;
  15851. void* pNext = {};
  15852. VULKAN_HPP_NAMESPACE::ImageLayout stencilInitialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  15853. VULKAN_HPP_NAMESPACE::ImageLayout stencilFinalLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  15854. };
  15855. static_assert( sizeof( AttachmentDescriptionStencilLayout ) == sizeof( VkAttachmentDescriptionStencilLayout ), "struct and wrapper have different size!" );
  15856. static_assert( std::is_standard_layout<AttachmentDescriptionStencilLayout>::value, "struct wrapper is not a standard layout!" );
  15857. template <>
  15858. struct CppType<StructureType, StructureType::eAttachmentDescriptionStencilLayout>
  15859. {
  15860. using Type = AttachmentDescriptionStencilLayout;
  15861. };
  15862. using AttachmentDescriptionStencilLayoutKHR = AttachmentDescriptionStencilLayout;
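// Illustrative usage sketch (editor's addition, not part of the generated header).
// AttachmentDescription2 is the pNext-extensible variant used with render-pass-2 style
// APIs; AttachmentDescriptionStencilLayout can be chained into it so the stencil aspect
// gets its own layouts. The concrete format and layouts are assumptions for the example:
//
//   vk::AttachmentDescriptionStencilLayout stencilLayouts =
//     vk::AttachmentDescriptionStencilLayout()
//       .setStencilInitialLayout( vk::ImageLayout::eUndefined )
//       .setStencilFinalLayout( vk::ImageLayout::eStencilAttachmentOptimal );
//
//   vk::AttachmentDescription2 depthStencilAttachment = vk::AttachmentDescription2()
//                                                         .setFormat( vk::Format::eD32SfloatS8Uint )
//                                                         .setInitialLayout( vk::ImageLayout::eUndefined )
//                                                         .setFinalLayout( vk::ImageLayout::eDepthAttachmentOptimal )
//                                                         .setPNext( &stencilLayouts );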
  15863. struct AttachmentReference
  15864. {
  15865. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15866. VULKAN_HPP_CONSTEXPR AttachmentReference(uint32_t attachment_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
  15867. : attachment( attachment_ ), layout( layout_ )
  15868. {}
  15869. VULKAN_HPP_CONSTEXPR AttachmentReference( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15870. AttachmentReference( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
  15871. : AttachmentReference( *reinterpret_cast<AttachmentReference const *>( &rhs ) )
  15872. {}
  15873. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15874. VULKAN_HPP_CONSTEXPR_14 AttachmentReference & operator=( AttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15875. AttachmentReference & operator=( VkAttachmentReference const & rhs ) VULKAN_HPP_NOEXCEPT
  15876. {
  15877. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference const *>( &rhs );
  15878. return *this;
  15879. }
  15880. AttachmentReference & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
  15881. {
  15882. attachment = attachment_;
  15883. return *this;
  15884. }
  15885. AttachmentReference & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
  15886. {
  15887. layout = layout_;
  15888. return *this;
  15889. }
  15890. operator VkAttachmentReference const&() const VULKAN_HPP_NOEXCEPT
  15891. {
  15892. return *reinterpret_cast<const VkAttachmentReference*>( this );
  15893. }
  15894. operator VkAttachmentReference &() VULKAN_HPP_NOEXCEPT
  15895. {
  15896. return *reinterpret_cast<VkAttachmentReference*>( this );
  15897. }
  15898. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  15899. auto operator<=>( AttachmentReference const& ) const = default;
  15900. #else
  15901. bool operator==( AttachmentReference const& rhs ) const VULKAN_HPP_NOEXCEPT
  15902. {
  15903. return ( attachment == rhs.attachment )
  15904. && ( layout == rhs.layout );
  15905. }
  15906. bool operator!=( AttachmentReference const& rhs ) const VULKAN_HPP_NOEXCEPT
  15907. {
  15908. return !operator==( rhs );
  15909. }
  15910. #endif
  15911. public:
  15912. uint32_t attachment = {};
  15913. VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  15914. };
  15915. static_assert( sizeof( AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" );
  15916. static_assert( std::is_standard_layout<AttachmentReference>::value, "struct wrapper is not a standard layout!" );
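// Illustrative usage sketch (editor's addition, not part of the generated header).
// The constructor takes the attachment index and the layout the attachment will be in
// during the subpass, so a reference is usually a one-liner (index 0 and the
// color-optimal layout are assumptions for the example):
//
//   vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal );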
  15917. struct AttachmentReference2
  15918. {
  15919. static const bool allowDuplicate = false;
  15920. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReference2;
  15921. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15922. VULKAN_HPP_CONSTEXPR AttachmentReference2(uint32_t attachment_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout layout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}) VULKAN_HPP_NOEXCEPT
  15923. : attachment( attachment_ ), layout( layout_ ), aspectMask( aspectMask_ )
  15924. {}
  15925. VULKAN_HPP_CONSTEXPR AttachmentReference2( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15926. AttachmentReference2( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
  15927. : AttachmentReference2( *reinterpret_cast<AttachmentReference2 const *>( &rhs ) )
  15928. {}
  15929. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  15930. VULKAN_HPP_CONSTEXPR_14 AttachmentReference2 & operator=( AttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  15931. AttachmentReference2 & operator=( VkAttachmentReference2 const & rhs ) VULKAN_HPP_NOEXCEPT
  15932. {
  15933. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReference2 const *>( &rhs );
  15934. return *this;
  15935. }
  15936. AttachmentReference2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  15937. {
  15938. pNext = pNext_;
  15939. return *this;
  15940. }
  15941. AttachmentReference2 & setAttachment( uint32_t attachment_ ) VULKAN_HPP_NOEXCEPT
  15942. {
  15943. attachment = attachment_;
  15944. return *this;
  15945. }
  15946. AttachmentReference2 & setLayout( VULKAN_HPP_NAMESPACE::ImageLayout layout_ ) VULKAN_HPP_NOEXCEPT
  15947. {
  15948. layout = layout_;
  15949. return *this;
  15950. }
  15951. AttachmentReference2 & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
  15952. {
  15953. aspectMask = aspectMask_;
  15954. return *this;
  15955. }
  15956. operator VkAttachmentReference2 const&() const VULKAN_HPP_NOEXCEPT
  15957. {
  15958. return *reinterpret_cast<const VkAttachmentReference2*>( this );
  15959. }
  15960. operator VkAttachmentReference2 &() VULKAN_HPP_NOEXCEPT
  15961. {
  15962. return *reinterpret_cast<VkAttachmentReference2*>( this );
  15963. }
  15964. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  15965. auto operator<=>( AttachmentReference2 const& ) const = default;
  15966. #else
  15967. bool operator==( AttachmentReference2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  15968. {
  15969. return ( sType == rhs.sType )
  15970. && ( pNext == rhs.pNext )
  15971. && ( attachment == rhs.attachment )
  15972. && ( layout == rhs.layout )
  15973. && ( aspectMask == rhs.aspectMask );
  15974. }
  15975. bool operator!=( AttachmentReference2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  15976. {
  15977. return !operator==( rhs );
  15978. }
  15979. #endif
  15980. public:
  15981. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReference2;
  15982. const void* pNext = {};
  15983. uint32_t attachment = {};
  15984. VULKAN_HPP_NAMESPACE::ImageLayout layout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  15985. VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
  15986. };
  15987. static_assert( sizeof( AttachmentReference2 ) == sizeof( VkAttachmentReference2 ), "struct and wrapper have different size!" );
  15988. static_assert( std::is_standard_layout<AttachmentReference2>::value, "struct wrapper is not a standard layout!" );
  15989. template <>
  15990. struct CppType<StructureType, StructureType::eAttachmentReference2>
  15991. {
  15992. using Type = AttachmentReference2;
  15993. };
  15994. using AttachmentReference2KHR = AttachmentReference2;
  15995. struct AttachmentReferenceStencilLayout
  15996. {
  15997. static const bool allowDuplicate = false;
  15998. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eAttachmentReferenceStencilLayout;
  15999. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  16000. VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout(VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
  16001. : stencilLayout( stencilLayout_ )
  16002. {}
  16003. VULKAN_HPP_CONSTEXPR AttachmentReferenceStencilLayout( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  16004. AttachmentReferenceStencilLayout( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
  16005. : AttachmentReferenceStencilLayout( *reinterpret_cast<AttachmentReferenceStencilLayout const *>( &rhs ) )
  16006. {}
  16007. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  16008. VULKAN_HPP_CONSTEXPR_14 AttachmentReferenceStencilLayout & operator=( AttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  16009. AttachmentReferenceStencilLayout & operator=( VkAttachmentReferenceStencilLayout const & rhs ) VULKAN_HPP_NOEXCEPT
  16010. {
  16011. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout const *>( &rhs );
  16012. return *this;
  16013. }
  16014. AttachmentReferenceStencilLayout & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  16015. {
  16016. pNext = pNext_;
  16017. return *this;
  16018. }
  16019. AttachmentReferenceStencilLayout & setStencilLayout( VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout_ ) VULKAN_HPP_NOEXCEPT
  16020. {
  16021. stencilLayout = stencilLayout_;
  16022. return *this;
  16023. }
  16024. operator VkAttachmentReferenceStencilLayout const&() const VULKAN_HPP_NOEXCEPT
  16025. {
  16026. return *reinterpret_cast<const VkAttachmentReferenceStencilLayout*>( this );
  16027. }
  16028. operator VkAttachmentReferenceStencilLayout &() VULKAN_HPP_NOEXCEPT
  16029. {
  16030. return *reinterpret_cast<VkAttachmentReferenceStencilLayout*>( this );
  16031. }
  16032. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  16033. auto operator<=>( AttachmentReferenceStencilLayout const& ) const = default;
  16034. #else
  16035. bool operator==( AttachmentReferenceStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
  16036. {
  16037. return ( sType == rhs.sType )
  16038. && ( pNext == rhs.pNext )
  16039. && ( stencilLayout == rhs.stencilLayout );
  16040. }
  16041. bool operator!=( AttachmentReferenceStencilLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
  16042. {
  16043. return !operator==( rhs );
  16044. }
  16045. #endif
  16046. public:
  16047. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eAttachmentReferenceStencilLayout;
  16048. void* pNext = {};
  16049. VULKAN_HPP_NAMESPACE::ImageLayout stencilLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  16050. };
  16051. static_assert( sizeof( AttachmentReferenceStencilLayout ) == sizeof( VkAttachmentReferenceStencilLayout ), "struct and wrapper have different size!" );
  16052. static_assert( std::is_standard_layout<AttachmentReferenceStencilLayout>::value, "struct wrapper is not a standard layout!" );
  16053. template <>
  16054. struct CppType<StructureType, StructureType::eAttachmentReferenceStencilLayout>
  16055. {
  16056. using Type = AttachmentReferenceStencilLayout;
  16057. };
  16058. using AttachmentReferenceStencilLayoutKHR = AttachmentReferenceStencilLayout;
  16059. struct Extent2D
  16060. {
  16061. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  16062. VULKAN_HPP_CONSTEXPR Extent2D(uint32_t width_ = {}, uint32_t height_ = {}) VULKAN_HPP_NOEXCEPT
  16063. : width( width_ ), height( height_ )
  16064. {}
  16065. VULKAN_HPP_CONSTEXPR Extent2D( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  16066. Extent2D( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
  16067. : Extent2D( *reinterpret_cast<Extent2D const *>( &rhs ) )
  16068. {}
  16069. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  16070. VULKAN_HPP_CONSTEXPR_14 Extent2D & operator=( Extent2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  16071. Extent2D & operator=( VkExtent2D const & rhs ) VULKAN_HPP_NOEXCEPT
  16072. {
  16073. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent2D const *>( &rhs );
  16074. return *this;
  16075. }
  16076. Extent2D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
  16077. {
  16078. width = width_;
  16079. return *this;
  16080. }
  16081. Extent2D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
  16082. {
  16083. height = height_;
  16084. return *this;
  16085. }
  16086. operator VkExtent2D const&() const VULKAN_HPP_NOEXCEPT
  16087. {
  16088. return *reinterpret_cast<const VkExtent2D*>( this );
  16089. }
  16090. operator VkExtent2D &() VULKAN_HPP_NOEXCEPT
  16091. {
  16092. return *reinterpret_cast<VkExtent2D*>( this );
  16093. }
  16094. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  16095. auto operator<=>( Extent2D const& ) const = default;
  16096. #else
  16097. bool operator==( Extent2D const& rhs ) const VULKAN_HPP_NOEXCEPT
  16098. {
  16099. return ( width == rhs.width )
  16100. && ( height == rhs.height );
  16101. }
  16102. bool operator!=( Extent2D const& rhs ) const VULKAN_HPP_NOEXCEPT
  16103. {
  16104. return !operator==( rhs );
  16105. }
  16106. #endif
  16107. public:
  16108. uint32_t width = {};
  16109. uint32_t height = {};
  16110. };
  16111. static_assert( sizeof( Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" );
  16112. static_assert( std::is_standard_layout<Extent2D>::value, "struct wrapper is not a standard layout!" );
  16113. struct SampleLocationEXT
  16114. {
  16115. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  16116. VULKAN_HPP_CONSTEXPR SampleLocationEXT(float x_ = {}, float y_ = {}) VULKAN_HPP_NOEXCEPT
  16117. : x( x_ ), y( y_ )
  16118. {}
  16119. VULKAN_HPP_CONSTEXPR SampleLocationEXT( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  16120. SampleLocationEXT( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  16121. : SampleLocationEXT( *reinterpret_cast<SampleLocationEXT const *>( &rhs ) )
  16122. {}
  16123. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  16124. VULKAN_HPP_CONSTEXPR_14 SampleLocationEXT & operator=( SampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  16125. SampleLocationEXT & operator=( VkSampleLocationEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  16126. {
  16127. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationEXT const *>( &rhs );
  16128. return *this;
  16129. }
  16130. SampleLocationEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
  16131. {
  16132. x = x_;
  16133. return *this;
  16134. }
  16135. SampleLocationEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
  16136. {
  16137. y = y_;
  16138. return *this;
  16139. }
  16140. operator VkSampleLocationEXT const&() const VULKAN_HPP_NOEXCEPT
  16141. {
  16142. return *reinterpret_cast<const VkSampleLocationEXT*>( this );
  16143. }
  16144. operator VkSampleLocationEXT &() VULKAN_HPP_NOEXCEPT
  16145. {
  16146. return *reinterpret_cast<VkSampleLocationEXT*>( this );
  16147. }
  16148. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  16149. auto operator<=>( SampleLocationEXT const& ) const = default;
  16150. #else
  16151. bool operator==( SampleLocationEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  16152. {
  16153. return ( x == rhs.x )
  16154. && ( y == rhs.y );
  16155. }
  16156. bool operator!=( SampleLocationEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  16157. {
  16158. return !operator==( rhs );
  16159. }
  16160. #endif
  16161. public:
  16162. float x = {};
  16163. float y = {};
  16164. };
  16165. static_assert( sizeof( SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" );
  16166. static_assert( std::is_standard_layout<SampleLocationEXT>::value, "struct wrapper is not a standard layout!" );
  16167. struct SampleLocationsInfoEXT
  16168. {
  16169. static const bool allowDuplicate = false;
  16170. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSampleLocationsInfoEXT;
  16171. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  16172. VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT(VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_ = {}, uint32_t sampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT
  16173. : sampleLocationsPerPixel( sampleLocationsPerPixel_ ), sampleLocationGridSize( sampleLocationGridSize_ ), sampleLocationsCount( sampleLocationsCount_ ), pSampleLocations( pSampleLocations_ )
  16174. {}
  16175. VULKAN_HPP_CONSTEXPR SampleLocationsInfoEXT( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  16176. SampleLocationsInfoEXT( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  16177. : SampleLocationsInfoEXT( *reinterpret_cast<SampleLocationsInfoEXT const *>( &rhs ) )
  16178. {}
  16179. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  16180. SampleLocationsInfoEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_, VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const & sampleLocations_ )
  16181. : sampleLocationsPerPixel( sampleLocationsPerPixel_ ), sampleLocationGridSize( sampleLocationGridSize_ ), sampleLocationsCount( static_cast<uint32_t>( sampleLocations_.size() ) ), pSampleLocations( sampleLocations_.data() )
  16182. {}
  16183. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  16184. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  16185. VULKAN_HPP_CONSTEXPR_14 SampleLocationsInfoEXT & operator=( SampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  16186. SampleLocationsInfoEXT & operator=( VkSampleLocationsInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  16187. {
  16188. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const *>( &rhs );
  16189. return *this;
  16190. }
  16191. SampleLocationsInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  16192. {
  16193. pNext = pNext_;
  16194. return *this;
  16195. }
  16196. SampleLocationsInfoEXT & setSampleLocationsPerPixel( VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel_ ) VULKAN_HPP_NOEXCEPT
  16197. {
  16198. sampleLocationsPerPixel = sampleLocationsPerPixel_;
  16199. return *this;
  16200. }
  16201. SampleLocationsInfoEXT & setSampleLocationGridSize( VULKAN_HPP_NAMESPACE::Extent2D const & sampleLocationGridSize_ ) VULKAN_HPP_NOEXCEPT
  16202. {
  16203. sampleLocationGridSize = sampleLocationGridSize_;
  16204. return *this;
  16205. }
  16206. SampleLocationsInfoEXT & setSampleLocationsCount( uint32_t sampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
  16207. {
  16208. sampleLocationsCount = sampleLocationsCount_;
  16209. return *this;
  16210. }
  16211. SampleLocationsInfoEXT & setPSampleLocations( const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
  16212. {
  16213. pSampleLocations = pSampleLocations_;
  16214. return *this;
  16215. }
  16216. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  16217. SampleLocationsInfoEXT & setSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SampleLocationEXT> const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT
  16218. {
  16219. sampleLocationsCount = static_cast<uint32_t>( sampleLocations_.size() );
  16220. pSampleLocations = sampleLocations_.data();
  16221. return *this;
  16222. }
  16223. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  16224. operator VkSampleLocationsInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  16225. {
  16226. return *reinterpret_cast<const VkSampleLocationsInfoEXT*>( this );
  16227. }
  16228. operator VkSampleLocationsInfoEXT &() VULKAN_HPP_NOEXCEPT
  16229. {
  16230. return *reinterpret_cast<VkSampleLocationsInfoEXT*>( this );
  16231. }
  16232. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  16233. auto operator<=>( SampleLocationsInfoEXT const& ) const = default;
  16234. #else
  16235. bool operator==( SampleLocationsInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  16236. {
  16237. return ( sType == rhs.sType )
  16238. && ( pNext == rhs.pNext )
  16239. && ( sampleLocationsPerPixel == rhs.sampleLocationsPerPixel )
  16240. && ( sampleLocationGridSize == rhs.sampleLocationGridSize )
  16241. && ( sampleLocationsCount == rhs.sampleLocationsCount )
  16242. && ( pSampleLocations == rhs.pSampleLocations );
  16243. }
  16244. bool operator!=( SampleLocationsInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  16245. {
  16246. return !operator==( rhs );
  16247. }
  16248. #endif
  16249. public:
  16250. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSampleLocationsInfoEXT;
  16251. const void* pNext = {};
  16252. VULKAN_HPP_NAMESPACE::SampleCountFlagBits sampleLocationsPerPixel = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
  16253. VULKAN_HPP_NAMESPACE::Extent2D sampleLocationGridSize = {};
  16254. uint32_t sampleLocationsCount = {};
  16255. const VULKAN_HPP_NAMESPACE::SampleLocationEXT* pSampleLocations = {};
  16256. };
  16257. static_assert( sizeof( SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ), "struct and wrapper have different size!" );
  16258. static_assert( std::is_standard_layout<SampleLocationsInfoEXT>::value, "struct wrapper is not a standard layout!" );
  16259. template <>
  16260. struct CppType<StructureType, StructureType::eSampleLocationsInfoEXT>
  16261. {
  16262. using Type = SampleLocationsInfoEXT;
  16263. };
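// Illustrative usage sketch (editor's addition, not part of the generated header).
// When enhanced mode is enabled (VULKAN_HPP_DISABLE_ENHANCED_MODE not defined), the
// ArrayProxy setter fills sampleLocationsCount and pSampleLocations from a container in
// one call; the specific locations and grid size are assumptions for the example:
//
//   std::vector<vk::SampleLocationEXT> locations = { { 0.25f, 0.25f }, { 0.75f, 0.75f } };
//   vk::SampleLocationsInfoEXT info = vk::SampleLocationsInfoEXT()
//                                       .setSampleLocationsPerPixel( vk::SampleCountFlagBits::e2 )
//                                       .setSampleLocationGridSize( vk::Extent2D( 1, 1 ) )
//                                       .setSampleLocations( locations );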
  16264. struct AttachmentSampleLocationsEXT
  16265. {
  16266. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  16267. VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT(uint32_t attachmentIndex_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT
  16268. : attachmentIndex( attachmentIndex_ ), sampleLocationsInfo( sampleLocationsInfo_ )
  16269. {}
  16270. VULKAN_HPP_CONSTEXPR AttachmentSampleLocationsEXT( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  16271. AttachmentSampleLocationsEXT( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  16272. : AttachmentSampleLocationsEXT( *reinterpret_cast<AttachmentSampleLocationsEXT const *>( &rhs ) )
  16273. {}
  16274. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  16275. VULKAN_HPP_CONSTEXPR_14 AttachmentSampleLocationsEXT & operator=( AttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  16276. AttachmentSampleLocationsEXT & operator=( VkAttachmentSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  16277. {
  16278. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT const *>( &rhs );
  16279. return *this;
  16280. }
  16281. AttachmentSampleLocationsEXT & setAttachmentIndex( uint32_t attachmentIndex_ ) VULKAN_HPP_NOEXCEPT
  16282. {
  16283. attachmentIndex = attachmentIndex_;
  16284. return *this;
  16285. }
  16286. AttachmentSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
  16287. {
  16288. sampleLocationsInfo = sampleLocationsInfo_;
  16289. return *this;
  16290. }
  16291. operator VkAttachmentSampleLocationsEXT const&() const VULKAN_HPP_NOEXCEPT
  16292. {
  16293. return *reinterpret_cast<const VkAttachmentSampleLocationsEXT*>( this );
  16294. }
  16295. operator VkAttachmentSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
  16296. {
  16297. return *reinterpret_cast<VkAttachmentSampleLocationsEXT*>( this );
  16298. }
  16299. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  16300. auto operator<=>( AttachmentSampleLocationsEXT const& ) const = default;
  16301. #else
  16302. bool operator==( AttachmentSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  16303. {
  16304. return ( attachmentIndex == rhs.attachmentIndex )
  16305. && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
  16306. }
  16307. bool operator!=( AttachmentSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  16308. {
  16309. return !operator==( rhs );
  16310. }
  16311. #endif
  16312. public:
  16313. uint32_t attachmentIndex = {};
  16314. VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
  16315. };
  16316. static_assert( sizeof( AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ), "struct and wrapper have different size!" );
  16317. static_assert( std::is_standard_layout<AttachmentSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
  16318. struct BaseInStructure
  16319. {
  16320. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  16321. BaseInStructure(VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo) VULKAN_HPP_NOEXCEPT
  16322. : sType( sType_ )
  16323. {}
  16324. BaseInStructure( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  16325. BaseInStructure( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
  16326. : BaseInStructure( *reinterpret_cast<BaseInStructure const *>( &rhs ) )
  16327. {}
  16328. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  16329. BaseInStructure & operator=( BaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  16330. BaseInStructure & operator=( VkBaseInStructure const & rhs ) VULKAN_HPP_NOEXCEPT
  16331. {
  16332. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseInStructure const *>( &rhs );
  16333. return *this;
  16334. }
  16335. BaseInStructure & setPNext( const struct VULKAN_HPP_NAMESPACE::BaseInStructure* pNext_ ) VULKAN_HPP_NOEXCEPT
  16336. {
  16337. pNext = pNext_;
  16338. return *this;
  16339. }
  16340. operator VkBaseInStructure const&() const VULKAN_HPP_NOEXCEPT
  16341. {
  16342. return *reinterpret_cast<const VkBaseInStructure*>( this );
  16343. }
  16344. operator VkBaseInStructure &() VULKAN_HPP_NOEXCEPT
  16345. {
  16346. return *reinterpret_cast<VkBaseInStructure*>( this );
  16347. }
  16348. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  16349. auto operator<=>( BaseInStructure const& ) const = default;
  16350. #else
  16351. bool operator==( BaseInStructure const& rhs ) const VULKAN_HPP_NOEXCEPT
  16352. {
  16353. return ( sType == rhs.sType )
  16354. && ( pNext == rhs.pNext );
  16355. }
  16356. bool operator!=( BaseInStructure const& rhs ) const VULKAN_HPP_NOEXCEPT
  16357. {
  16358. return !operator==( rhs );
  16359. }
  16360. #endif
  16361. public:
  16362. VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
  16363. const struct VULKAN_HPP_NAMESPACE::BaseInStructure* pNext = {};
  16364. };
  16365. static_assert( sizeof( BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" );
  16366. static_assert( std::is_standard_layout<BaseInStructure>::value, "struct wrapper is not a standard layout!" );
  16367. struct BaseOutStructure
  16368. {
  16369. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  16370. BaseOutStructure(VULKAN_HPP_NAMESPACE::StructureType sType_ = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo) VULKAN_HPP_NOEXCEPT
  16371. : sType( sType_ )
  16372. {}
  16373. BaseOutStructure( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  16374. BaseOutStructure( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
  16375. : BaseOutStructure( *reinterpret_cast<BaseOutStructure const *>( &rhs ) )
  16376. {}
  16377. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  16378. BaseOutStructure & operator=( BaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  16379. BaseOutStructure & operator=( VkBaseOutStructure const & rhs ) VULKAN_HPP_NOEXCEPT
  16380. {
  16381. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BaseOutStructure const *>( &rhs );
  16382. return *this;
  16383. }
  16384. BaseOutStructure & setPNext( struct VULKAN_HPP_NAMESPACE::BaseOutStructure* pNext_ ) VULKAN_HPP_NOEXCEPT
  16385. {
  16386. pNext = pNext_;
  16387. return *this;
  16388. }
  16389. operator VkBaseOutStructure const&() const VULKAN_HPP_NOEXCEPT
  16390. {
  16391. return *reinterpret_cast<const VkBaseOutStructure*>( this );
  16392. }
  16393. operator VkBaseOutStructure &() VULKAN_HPP_NOEXCEPT
  16394. {
  16395. return *reinterpret_cast<VkBaseOutStructure*>( this );
  16396. }
  16397. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  16398. auto operator<=>( BaseOutStructure const& ) const = default;
  16399. #else
  16400. bool operator==( BaseOutStructure const& rhs ) const VULKAN_HPP_NOEXCEPT
  16401. {
  16402. return ( sType == rhs.sType )
  16403. && ( pNext == rhs.pNext );
  16404. }
  16405. bool operator!=( BaseOutStructure const& rhs ) const VULKAN_HPP_NOEXCEPT
  16406. {
  16407. return !operator==( rhs );
  16408. }
  16409. #endif
  16410. public:
  16411. VULKAN_HPP_NAMESPACE::StructureType sType = VULKAN_HPP_NAMESPACE::StructureType::eApplicationInfo;
  16412. struct VULKAN_HPP_NAMESPACE::BaseOutStructure* pNext = {};
  16413. };
  16414. static_assert( sizeof( BaseOutStructure ) == sizeof( VkBaseOutStructure ), "struct and wrapper have different size!" );
  16415. static_assert( std::is_standard_layout<BaseOutStructure>::value, "struct wrapper is not a standard layout!" );
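// Illustrative usage sketch (editor's addition, not part of the generated header).
// BaseInStructure / BaseOutStructure mirror VkBaseInStructure / VkBaseOutStructure and
// are handy for walking a pNext chain generically. `chainHead` (a pointer to the first
// structure of a chain) and `wanted` (the StructureType being searched for) are assumed
// to be provided by the surrounding code:
//
//   for ( auto * node = reinterpret_cast<vk::BaseOutStructure *>( chainHead );
//         node != nullptr;
//         node = node->pNext )
//   {
//     if ( node->sType == wanted )
//     {
//       // found the chained structure of interest
//     }
//   }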
  16416. class DeviceMemory
  16417. {
  16418. public:
  16419. using CType = VkDeviceMemory;
  16420. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory;
  16421. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory;
  16422. public:
  16423. VULKAN_HPP_CONSTEXPR DeviceMemory() VULKAN_HPP_NOEXCEPT
  16424. : m_deviceMemory(VK_NULL_HANDLE)
  16425. {}
  16426. VULKAN_HPP_CONSTEXPR DeviceMemory( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  16427. : m_deviceMemory(VK_NULL_HANDLE)
  16428. {}
  16429. VULKAN_HPP_TYPESAFE_EXPLICIT DeviceMemory( VkDeviceMemory deviceMemory ) VULKAN_HPP_NOEXCEPT
  16430. : m_deviceMemory( deviceMemory )
  16431. {}
  16432. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  16433. DeviceMemory & operator=(VkDeviceMemory deviceMemory) VULKAN_HPP_NOEXCEPT
  16434. {
  16435. m_deviceMemory = deviceMemory;
  16436. return *this;
  16437. }
  16438. #endif
  16439. DeviceMemory & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  16440. {
  16441. m_deviceMemory = VK_NULL_HANDLE;
  16442. return *this;
  16443. }
  16444. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  16445. auto operator<=>( DeviceMemory const& ) const = default;
  16446. #else
  16447. bool operator==( DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
  16448. {
  16449. return m_deviceMemory == rhs.m_deviceMemory;
  16450. }
  16451. bool operator!=(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
  16452. {
  16453. return m_deviceMemory != rhs.m_deviceMemory;
  16454. }
  16455. bool operator<(DeviceMemory const & rhs ) const VULKAN_HPP_NOEXCEPT
  16456. {
  16457. return m_deviceMemory < rhs.m_deviceMemory;
  16458. }
  16459. #endif
  16460. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeviceMemory() const VULKAN_HPP_NOEXCEPT
  16461. {
  16462. return m_deviceMemory;
  16463. }
  16464. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  16465. {
  16466. return m_deviceMemory != VK_NULL_HANDLE;
  16467. }
  16468. bool operator!() const VULKAN_HPP_NOEXCEPT
  16469. {
  16470. return m_deviceMemory == VK_NULL_HANDLE;
  16471. }
  16472. private:
  16473. VkDeviceMemory m_deviceMemory;
  16474. };
  16475. static_assert( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" );
  16476. template <>
  16477. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDeviceMemory>
  16478. {
  16479. using type = VULKAN_HPP_NAMESPACE::DeviceMemory;
  16480. };
  16481. template <>
  16482. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDeviceMemory>
  16483. {
  16484. using Type = VULKAN_HPP_NAMESPACE::DeviceMemory;
  16485. };
  16486. template <>
  16487. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDeviceMemory>
  16488. {
  16489. using Type = VULKAN_HPP_NAMESPACE::DeviceMemory;
  16490. };
  16491. template <>
  16492. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DeviceMemory>
  16493. {
  16494. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  16495. };
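// Illustrative usage sketch (editor's addition, not part of the generated header).
// DeviceMemory is a non-owning handle wrapper: default- or nullptr-constructed handles
// hold VK_NULL_HANDLE and test false. `device` and `allocInfo` below are assumed to
// exist in the surrounding code:
//
//   vk::DeviceMemory memory;                                      // holds VK_NULL_HANDLE
//   if ( !memory ) { /* nothing allocated yet */ }
//   memory = device.allocateMemory( allocInfo );                  // acquire a real handle
//   VkDeviceMemory raw = static_cast<VkDeviceMemory>( memory );   // back to the C handle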
  16496. struct BindAccelerationStructureMemoryInfoNV
  16497. {
  16498. static const bool allowDuplicate = false;
  16499. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindAccelerationStructureMemoryInfoNV;
  16500. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  16501. VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV(VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, uint32_t deviceIndexCount_ = {}, const uint32_t* pDeviceIndices_ = {}) VULKAN_HPP_NOEXCEPT
  16502. : accelerationStructure( accelerationStructure_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), deviceIndexCount( deviceIndexCount_ ), pDeviceIndices( pDeviceIndices_ )
  16503. {}
  16504. VULKAN_HPP_CONSTEXPR BindAccelerationStructureMemoryInfoNV( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  16505. BindAccelerationStructureMemoryInfoNV( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  16506. : BindAccelerationStructureMemoryInfoNV( *reinterpret_cast<BindAccelerationStructureMemoryInfoNV const *>( &rhs ) )
  16507. {}
  16508. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  16509. BindAccelerationStructureMemoryInfoNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_, VULKAN_HPP_NAMESPACE::DeviceMemory memory_, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ )
  16510. : accelerationStructure( accelerationStructure_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() )
  16511. {}
  16512. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  16513. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  16514. VULKAN_HPP_CONSTEXPR_14 BindAccelerationStructureMemoryInfoNV & operator=( BindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  16515. BindAccelerationStructureMemoryInfoNV & operator=( VkBindAccelerationStructureMemoryInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  16516. {
  16517. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV const *>( &rhs );
  16518. return *this;
  16519. }
  16520. BindAccelerationStructureMemoryInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  16521. {
  16522. pNext = pNext_;
  16523. return *this;
  16524. }
  16525. BindAccelerationStructureMemoryInfoNV & setAccelerationStructure( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
  16526. {
  16527. accelerationStructure = accelerationStructure_;
  16528. return *this;
  16529. }
  16530. BindAccelerationStructureMemoryInfoNV & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
  16531. {
  16532. memory = memory_;
  16533. return *this;
  16534. }
  16535. BindAccelerationStructureMemoryInfoNV & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
  16536. {
  16537. memoryOffset = memoryOffset_;
  16538. return *this;
  16539. }
  16540. BindAccelerationStructureMemoryInfoNV & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
  16541. {
  16542. deviceIndexCount = deviceIndexCount_;
  16543. return *this;
  16544. }
  16545. BindAccelerationStructureMemoryInfoNV & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
  16546. {
  16547. pDeviceIndices = pDeviceIndices_;
  16548. return *this;
  16549. }
  16550. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  16551. BindAccelerationStructureMemoryInfoNV & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
  16552. {
  16553. deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
  16554. pDeviceIndices = deviceIndices_.data();
  16555. return *this;
  16556. }
  16557. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  16558. operator VkBindAccelerationStructureMemoryInfoNV const&() const VULKAN_HPP_NOEXCEPT
  16559. {
  16560. return *reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV*>( this );
  16561. }
  16562. operator VkBindAccelerationStructureMemoryInfoNV &() VULKAN_HPP_NOEXCEPT
  16563. {
  16564. return *reinterpret_cast<VkBindAccelerationStructureMemoryInfoNV*>( this );
  16565. }
  16566. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  16567. auto operator<=>( BindAccelerationStructureMemoryInfoNV const& ) const = default;
  16568. #else
  16569. bool operator==( BindAccelerationStructureMemoryInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  16570. {
  16571. return ( sType == rhs.sType )
  16572. && ( pNext == rhs.pNext )
  16573. && ( accelerationStructure == rhs.accelerationStructure )
  16574. && ( memory == rhs.memory )
  16575. && ( memoryOffset == rhs.memoryOffset )
  16576. && ( deviceIndexCount == rhs.deviceIndexCount )
  16577. && ( pDeviceIndices == rhs.pDeviceIndices );
  16578. }
  16579. bool operator!=( BindAccelerationStructureMemoryInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  16580. {
  16581. return !operator==( rhs );
  16582. }
  16583. #endif
  16584. public:
  16585. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindAccelerationStructureMemoryInfoNV;
  16586. const void* pNext = {};
  16587. VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure = {};
  16588. VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
  16589. VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
  16590. uint32_t deviceIndexCount = {};
  16591. const uint32_t* pDeviceIndices = {};
  16592. };
  16593. static_assert( sizeof( BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ), "struct and wrapper have different size!" );
  16594. static_assert( std::is_standard_layout<BindAccelerationStructureMemoryInfoNV>::value, "struct wrapper is not a standard layout!" );
  16595. template <>
  16596. struct CppType<StructureType, StructureType::eBindAccelerationStructureMemoryInfoNV>
  16597. {
  16598. using Type = BindAccelerationStructureMemoryInfoNV;
  16599. };
struct BindBufferMemoryDeviceGroupInfo
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryDeviceGroupInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo(uint32_t deviceIndexCount_ = {}, const uint32_t* pDeviceIndices_ = {}) VULKAN_HPP_NOEXCEPT
: deviceIndexCount( deviceIndexCount_ ), pDeviceIndices( pDeviceIndices_ )
{}
VULKAN_HPP_CONSTEXPR BindBufferMemoryDeviceGroupInfo( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindBufferMemoryDeviceGroupInfo( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BindBufferMemoryDeviceGroupInfo( *reinterpret_cast<BindBufferMemoryDeviceGroupInfo const *>( &rhs ) )
{}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
BindBufferMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ )
: deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() )
{}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryDeviceGroupInfo & operator=( BindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindBufferMemoryDeviceGroupInfo & operator=( VkBindBufferMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo const *>( &rhs );
return *this;
}
BindBufferMemoryDeviceGroupInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
BindBufferMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
deviceIndexCount = deviceIndexCount_;
return *this;
}
BindBufferMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
pDeviceIndices = pDeviceIndices_;
return *this;
}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
BindBufferMemoryDeviceGroupInfo & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
pDeviceIndices = deviceIndices_.data();
return *this;
}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
operator VkBindBufferMemoryDeviceGroupInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindBufferMemoryDeviceGroupInfo*>( this );
}
operator VkBindBufferMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindBufferMemoryDeviceGroupInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindBufferMemoryDeviceGroupInfo const& ) const = default;
#else
bool operator==( BindBufferMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceIndexCount == rhs.deviceIndexCount )
&& ( pDeviceIndices == rhs.pDeviceIndices );
}
bool operator!=( BindBufferMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryDeviceGroupInfo;
const void* pNext = {};
uint32_t deviceIndexCount = {};
const uint32_t* pDeviceIndices = {};
};
static_assert( sizeof( BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindBufferMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eBindBufferMemoryDeviceGroupInfo>
{
using Type = BindBufferMemoryDeviceGroupInfo;
};
using BindBufferMemoryDeviceGroupInfoKHR = BindBufferMemoryDeviceGroupInfo;
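// Usage sketch (illustrative comment only): in a two-GPU device group this structure is
// chained into the pNext of a BindBufferMemoryInfo (defined just below) to select which
// memory instance each physical device binds; `buffer` and `memory` are assumed handles:
//   std::array<uint32_t, 2> deviceIndices = { 0, 1 };
//   vk::BindBufferMemoryDeviceGroupInfo groupInfo;
//   groupInfo.setDeviceIndices( deviceIndices );          // enhanced-mode ArrayProxy setter
//   vk::BindBufferMemoryInfo bindInfo( buffer, memory, 0 );
//   bindInfo.setPNext( &groupInfo );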
struct BindBufferMemoryInfo
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindBufferMemoryInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}) VULKAN_HPP_NOEXCEPT
: buffer( buffer_ ), memory( memory_ ), memoryOffset( memoryOffset_ )
{}
VULKAN_HPP_CONSTEXPR BindBufferMemoryInfo( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindBufferMemoryInfo( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BindBufferMemoryInfo( *reinterpret_cast<BindBufferMemoryInfo const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 BindBufferMemoryInfo & operator=( BindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindBufferMemoryInfo & operator=( VkBindBufferMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo const *>( &rhs );
return *this;
}
BindBufferMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
BindBufferMemoryInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
BindBufferMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
BindBufferMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
}
operator VkBindBufferMemoryInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindBufferMemoryInfo*>( this );
}
operator VkBindBufferMemoryInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindBufferMemoryInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindBufferMemoryInfo const& ) const = default;
#else
bool operator==( BindBufferMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( buffer == rhs.buffer )
&& ( memory == rhs.memory )
&& ( memoryOffset == rhs.memoryOffset );
}
bool operator!=( BindBufferMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindBufferMemoryInfo;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
};
static_assert( sizeof( BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindBufferMemoryInfo>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eBindBufferMemoryInfo>
{
using Type = BindBufferMemoryInfo;
};
using BindBufferMemoryInfoKHR = BindBufferMemoryInfo;
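// Usage sketch (illustrative comment only): the Vulkan 1.1 "bind memory 2" path, assuming a
// valid vk::Device `device`, vk::Buffer `buffer` and vk::DeviceMemory `memory`; in enhanced
// mode the Device wrapper takes one or several infos at once:
//   vk::BindBufferMemoryInfo bindInfo( buffer, memory, /*memoryOffset=*/0 );
//   device.bindBufferMemory2( bindInfo );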
struct Offset2D
{
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR Offset2D(int32_t x_ = {}, int32_t y_ = {}) VULKAN_HPP_NOEXCEPT
: x( x_ ), y( y_ )
{}
VULKAN_HPP_CONSTEXPR Offset2D( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Offset2D( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
: Offset2D( *reinterpret_cast<Offset2D const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 Offset2D & operator=( Offset2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Offset2D & operator=( VkOffset2D const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset2D const *>( &rhs );
return *this;
}
Offset2D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
{
x = x_;
return *this;
}
Offset2D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
{
y = y_;
return *this;
}
operator VkOffset2D const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkOffset2D*>( this );
}
operator VkOffset2D &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkOffset2D*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( Offset2D const& ) const = default;
#else
bool operator==( Offset2D const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( x == rhs.x )
&& ( y == rhs.y );
}
bool operator!=( Offset2D const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
int32_t x = {};
int32_t y = {};
};
static_assert( sizeof( Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Offset2D>::value, "struct wrapper is not a standard layout!" );
struct Rect2D
{
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR Rect2D(VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}) VULKAN_HPP_NOEXCEPT
: offset( offset_ ), extent( extent_ )
{}
VULKAN_HPP_CONSTEXPR Rect2D( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Rect2D( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
: Rect2D( *reinterpret_cast<Rect2D const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 Rect2D & operator=( Rect2D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Rect2D & operator=( VkRect2D const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Rect2D const *>( &rhs );
return *this;
}
Rect2D & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
{
offset = offset_;
return *this;
}
Rect2D & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
{
extent = extent_;
return *this;
}
operator VkRect2D const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkRect2D*>( this );
}
operator VkRect2D &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkRect2D*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( Rect2D const& ) const = default;
#else
bool operator==( Rect2D const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( offset == rhs.offset )
&& ( extent == rhs.extent );
}
bool operator!=( Rect2D const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::Offset2D offset = {};
VULKAN_HPP_NAMESPACE::Extent2D extent = {};
};
static_assert( sizeof( Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Rect2D>::value, "struct wrapper is not a standard layout!" );
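// Usage sketch (illustrative comment only): Offset2D and Rect2D are thin layout-compatible
// wrappers over the C structs, e.g. for a dynamic scissor rectangle, assuming a recording
// vk::CommandBuffer `commandBuffer`:
//   vk::Rect2D scissor( vk::Offset2D( 0, 0 ), vk::Extent2D( 640, 480 ) );
//   commandBuffer.setScissor( /*firstScissor=*/0, scissor );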
struct BindImageMemoryDeviceGroupInfo
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryDeviceGroupInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo(uint32_t deviceIndexCount_ = {}, const uint32_t* pDeviceIndices_ = {}, uint32_t splitInstanceBindRegionCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ = {}) VULKAN_HPP_NOEXCEPT
: deviceIndexCount( deviceIndexCount_ ), pDeviceIndices( pDeviceIndices_ ), splitInstanceBindRegionCount( splitInstanceBindRegionCount_ ), pSplitInstanceBindRegions( pSplitInstanceBindRegions_ )
{}
VULKAN_HPP_CONSTEXPR BindImageMemoryDeviceGroupInfo( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImageMemoryDeviceGroupInfo( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BindImageMemoryDeviceGroupInfo( *reinterpret_cast<BindImageMemoryDeviceGroupInfo const *>( &rhs ) )
{}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
BindImageMemoryDeviceGroupInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & splitInstanceBindRegions_ = {} )
: deviceIndexCount( static_cast<uint32_t>( deviceIndices_.size() ) ), pDeviceIndices( deviceIndices_.data() ), splitInstanceBindRegionCount( static_cast<uint32_t>( splitInstanceBindRegions_.size() ) ), pSplitInstanceBindRegions( splitInstanceBindRegions_.data() )
{}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryDeviceGroupInfo & operator=( BindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImageMemoryDeviceGroupInfo & operator=( VkBindImageMemoryDeviceGroupInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo const *>( &rhs );
return *this;
}
BindImageMemoryDeviceGroupInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
BindImageMemoryDeviceGroupInfo & setDeviceIndexCount( uint32_t deviceIndexCount_ ) VULKAN_HPP_NOEXCEPT
{
deviceIndexCount = deviceIndexCount_;
return *this;
}
BindImageMemoryDeviceGroupInfo & setPDeviceIndices( const uint32_t* pDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
pDeviceIndices = pDeviceIndices_;
return *this;
}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
BindImageMemoryDeviceGroupInfo & setDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceIndices_ ) VULKAN_HPP_NOEXCEPT
{
deviceIndexCount = static_cast<uint32_t>( deviceIndices_.size() );
pDeviceIndices = deviceIndices_.data();
return *this;
}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegionCount( uint32_t splitInstanceBindRegionCount_ ) VULKAN_HPP_NOEXCEPT
{
splitInstanceBindRegionCount = splitInstanceBindRegionCount_;
return *this;
}
BindImageMemoryDeviceGroupInfo & setPSplitInstanceBindRegions( const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
{
pSplitInstanceBindRegions = pSplitInstanceBindRegions_;
return *this;
}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
BindImageMemoryDeviceGroupInfo & setSplitInstanceBindRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & splitInstanceBindRegions_ ) VULKAN_HPP_NOEXCEPT
{
splitInstanceBindRegionCount = static_cast<uint32_t>( splitInstanceBindRegions_.size() );
pSplitInstanceBindRegions = splitInstanceBindRegions_.data();
return *this;
}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
operator VkBindImageMemoryDeviceGroupInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindImageMemoryDeviceGroupInfo*>( this );
}
operator VkBindImageMemoryDeviceGroupInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindImageMemoryDeviceGroupInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindImageMemoryDeviceGroupInfo const& ) const = default;
#else
bool operator==( BindImageMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceIndexCount == rhs.deviceIndexCount )
&& ( pDeviceIndices == rhs.pDeviceIndices )
&& ( splitInstanceBindRegionCount == rhs.splitInstanceBindRegionCount )
&& ( pSplitInstanceBindRegions == rhs.pSplitInstanceBindRegions );
}
bool operator!=( BindImageMemoryDeviceGroupInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryDeviceGroupInfo;
const void* pNext = {};
uint32_t deviceIndexCount = {};
const uint32_t* pDeviceIndices = {};
uint32_t splitInstanceBindRegionCount = {};
const VULKAN_HPP_NAMESPACE::Rect2D* pSplitInstanceBindRegions = {};
};
static_assert( sizeof( BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindImageMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eBindImageMemoryDeviceGroupInfo>
{
using Type = BindImageMemoryDeviceGroupInfo;
};
using BindImageMemoryDeviceGroupInfoKHR = BindImageMemoryDeviceGroupInfo;
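// Usage sketch (illustrative comment only, device-group functionality): chained into the
// pNext of a BindImageMemoryInfo to pick a memory instance per physical device in a group;
// `image` and `memory` are assumed handles created elsewhere:
//   std::array<uint32_t, 2> deviceIndices = { 0, 1 };
//   vk::BindImageMemoryDeviceGroupInfo groupInfo;
//   groupInfo.setDeviceIndices( deviceIndices );
//   vk::BindImageMemoryInfo bindInfo( image, memory, 0 );
//   bindInfo.setPNext( &groupInfo );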
class Image
{
public:
using CType = VkImage;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImage;
static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage;
public:
VULKAN_HPP_CONSTEXPR Image() VULKAN_HPP_NOEXCEPT
: m_image(VK_NULL_HANDLE)
{}
VULKAN_HPP_CONSTEXPR Image( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
: m_image(VK_NULL_HANDLE)
{}
VULKAN_HPP_TYPESAFE_EXPLICIT Image( VkImage image ) VULKAN_HPP_NOEXCEPT
: m_image( image )
{}
#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
Image & operator=(VkImage image) VULKAN_HPP_NOEXCEPT
{
m_image = image;
return *this;
}
#endif
Image & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
{
m_image = VK_NULL_HANDLE;
return *this;
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( Image const& ) const = default;
#else
bool operator==( Image const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return m_image == rhs.m_image;
}
bool operator!=(Image const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return m_image != rhs.m_image;
}
bool operator<(Image const & rhs ) const VULKAN_HPP_NOEXCEPT
{
return m_image < rhs.m_image;
}
#endif
VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImage() const VULKAN_HPP_NOEXCEPT
{
return m_image;
}
explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
return m_image != VK_NULL_HANDLE;
}
bool operator!() const VULKAN_HPP_NOEXCEPT
{
return m_image == VK_NULL_HANDLE;
}
private:
VkImage m_image;
};
static_assert( sizeof( VULKAN_HPP_NAMESPACE::Image ) == sizeof( VkImage ), "handle and wrapper have different size!" );
template <>
struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eImage>
{
using type = VULKAN_HPP_NAMESPACE::Image;
};
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eImage>
{
using Type = VULKAN_HPP_NAMESPACE::Image;
};
template <>
struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImage>
{
using Type = VULKAN_HPP_NAMESPACE::Image;
};
template <>
struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Image>
{
static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
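// Usage note (illustrative comment only): vk::Image is a non-owning handle wrapper with the
// same size as VkImage; it default-constructs to VK_NULL_HANDLE, converts to bool for
// validity checks, and can be reset with nullptr:
//   vk::Image img;                 // null handle
//   if ( !img ) { /* nothing bound yet */ }
//   img = nullptr;                 // back to VK_NULL_HANDLE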
struct BindImageMemoryInfo
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemoryInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindImageMemoryInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}) VULKAN_HPP_NOEXCEPT
: image( image_ ), memory( memory_ ), memoryOffset( memoryOffset_ )
{}
VULKAN_HPP_CONSTEXPR BindImageMemoryInfo( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImageMemoryInfo( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BindImageMemoryInfo( *reinterpret_cast<BindImageMemoryInfo const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 BindImageMemoryInfo & operator=( BindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImageMemoryInfo & operator=( VkBindImageMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo const *>( &rhs );
return *this;
}
BindImageMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
BindImageMemoryInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
BindImageMemoryInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
BindImageMemoryInfo & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
}
operator VkBindImageMemoryInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindImageMemoryInfo*>( this );
}
operator VkBindImageMemoryInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindImageMemoryInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindImageMemoryInfo const& ) const = default;
#else
bool operator==( BindImageMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( image == rhs.image )
&& ( memory == rhs.memory )
&& ( memoryOffset == rhs.memoryOffset );
}
bool operator!=( BindImageMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemoryInfo;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::Image image = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
};
static_assert( sizeof( BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindImageMemoryInfo>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eBindImageMemoryInfo>
{
using Type = BindImageMemoryInfo;
};
using BindImageMemoryInfoKHR = BindImageMemoryInfo;
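// Usage sketch (illustrative comment only): the Vulkan 1.1 image-binding path, assuming a
// valid vk::Device `device`, vk::Image `image` and vk::DeviceMemory `memory`; several infos
// can be batched into one enhanced-mode call:
//   vk::BindImageMemoryInfo bindInfo( image, memory, /*memoryOffset=*/0 );
//   device.bindImageMemory2( bindInfo );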
struct BindImageMemorySwapchainInfoKHR
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImageMemorySwapchainInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}, uint32_t imageIndex_ = {}) VULKAN_HPP_NOEXCEPT
: swapchain( swapchain_ ), imageIndex( imageIndex_ )
{}
VULKAN_HPP_CONSTEXPR BindImageMemorySwapchainInfoKHR( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImageMemorySwapchainInfoKHR( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: BindImageMemorySwapchainInfoKHR( *reinterpret_cast<BindImageMemorySwapchainInfoKHR const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 BindImageMemorySwapchainInfoKHR & operator=( BindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImageMemorySwapchainInfoKHR & operator=( VkBindImageMemorySwapchainInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR const *>( &rhs );
return *this;
}
BindImageMemorySwapchainInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
BindImageMemorySwapchainInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
{
swapchain = swapchain_;
return *this;
}
BindImageMemorySwapchainInfoKHR & setImageIndex( uint32_t imageIndex_ ) VULKAN_HPP_NOEXCEPT
{
imageIndex = imageIndex_;
return *this;
}
operator VkBindImageMemorySwapchainInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindImageMemorySwapchainInfoKHR*>( this );
}
operator VkBindImageMemorySwapchainInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindImageMemorySwapchainInfoKHR*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindImageMemorySwapchainInfoKHR const& ) const = default;
#else
bool operator==( BindImageMemorySwapchainInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( swapchain == rhs.swapchain )
&& ( imageIndex == rhs.imageIndex );
}
bool operator!=( BindImageMemorySwapchainInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImageMemorySwapchainInfoKHR;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
uint32_t imageIndex = {};
};
static_assert( sizeof( BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindImageMemorySwapchainInfoKHR>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eBindImageMemorySwapchainInfoKHR>
{
using Type = BindImageMemorySwapchainInfoKHR;
};
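// Usage sketch (illustrative comment only, requires the relevant VK_KHR_swapchain
// functionality): chained into BindImageMemoryInfo::pNext to bind an image to memory owned
// by a swapchain image; the outer structure's memory member is then left null:
//   vk::BindImageMemorySwapchainInfoKHR swapchainInfo( swapchain, /*imageIndex=*/0 );
//   vk::BindImageMemoryInfo bindInfo;
//   bindInfo.setImage( image ).setPNext( &swapchainInfo );
//   device.bindImageMemory2( bindInfo );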
struct BindImagePlaneMemoryInfo
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindImagePlaneMemoryInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor) VULKAN_HPP_NOEXCEPT
: planeAspect( planeAspect_ )
{}
VULKAN_HPP_CONSTEXPR BindImagePlaneMemoryInfo( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImagePlaneMemoryInfo( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: BindImagePlaneMemoryInfo( *reinterpret_cast<BindImagePlaneMemoryInfo const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 BindImagePlaneMemoryInfo & operator=( BindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindImagePlaneMemoryInfo & operator=( VkBindImagePlaneMemoryInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo const *>( &rhs );
return *this;
}
BindImagePlaneMemoryInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
BindImagePlaneMemoryInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
{
planeAspect = planeAspect_;
return *this;
}
operator VkBindImagePlaneMemoryInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindImagePlaneMemoryInfo*>( this );
}
operator VkBindImagePlaneMemoryInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindImagePlaneMemoryInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindImagePlaneMemoryInfo const& ) const = default;
#else
bool operator==( BindImagePlaneMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( planeAspect == rhs.planeAspect );
}
bool operator!=( BindImagePlaneMemoryInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindImagePlaneMemoryInfo;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
};
static_assert( sizeof( BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindImagePlaneMemoryInfo>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eBindImagePlaneMemoryInfo>
{
using Type = BindImagePlaneMemoryInfo;
};
using BindImagePlaneMemoryInfoKHR = BindImagePlaneMemoryInfo;
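// Usage sketch (illustrative comment only, for disjoint multi-planar images): one
// BindImageMemoryInfo per plane, each with a BindImagePlaneMemoryInfo chained in; here
// `image` and `plane0Memory` are assumed handles for plane 0 of a disjoint image:
//   vk::BindImagePlaneMemoryInfo planeInfo( vk::ImageAspectFlagBits::ePlane0 );
//   vk::BindImageMemoryInfo bindInfo( image, plane0Memory, 0 );
//   bindInfo.setPNext( &planeInfo );
//   device.bindImageMemory2( bindInfo );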
struct BindIndexBufferIndirectCommandNV
{
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, VULKAN_HPP_NAMESPACE::IndexType indexType_ = VULKAN_HPP_NAMESPACE::IndexType::eUint16) VULKAN_HPP_NOEXCEPT
: bufferAddress( bufferAddress_ ), size( size_ ), indexType( indexType_ )
{}
VULKAN_HPP_CONSTEXPR BindIndexBufferIndirectCommandNV( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindIndexBufferIndirectCommandNV( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
: BindIndexBufferIndirectCommandNV( *reinterpret_cast<BindIndexBufferIndirectCommandNV const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 BindIndexBufferIndirectCommandNV & operator=( BindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindIndexBufferIndirectCommandNV & operator=( VkBindIndexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV const *>( &rhs );
return *this;
}
BindIndexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
{
bufferAddress = bufferAddress_;
return *this;
}
BindIndexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
BindIndexBufferIndirectCommandNV & setIndexType( VULKAN_HPP_NAMESPACE::IndexType indexType_ ) VULKAN_HPP_NOEXCEPT
{
indexType = indexType_;
return *this;
}
operator VkBindIndexBufferIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindIndexBufferIndirectCommandNV*>( this );
}
operator VkBindIndexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindIndexBufferIndirectCommandNV*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindIndexBufferIndirectCommandNV const& ) const = default;
#else
bool operator==( BindIndexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( bufferAddress == rhs.bufferAddress )
&& ( size == rhs.size )
&& ( indexType == rhs.indexType );
}
bool operator!=( BindIndexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
uint32_t size = {};
VULKAN_HPP_NAMESPACE::IndexType indexType = VULKAN_HPP_NAMESPACE::IndexType::eUint16;
};
static_assert( sizeof( BindIndexBufferIndirectCommandNV ) == sizeof( VkBindIndexBufferIndirectCommandNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindIndexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
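// Usage note (illustrative comment only, VK_NV_device_generated_commands): this structure is
// not passed to an API entry point directly; it is the record layout an application writes
// into the indirect-commands stream buffer for an index-buffer-binding token, e.g.:
//   vk::BindIndexBufferIndirectCommandNV cmd( bufferDeviceAddress, sizeInBytes,
//                                             vk::IndexType::eUint32 );
//   // ...then copied into the stream buffer at the token's offset for the generated draw.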
struct BindShaderGroupIndirectCommandNV
{
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV(uint32_t groupIndex_ = {}) VULKAN_HPP_NOEXCEPT
: groupIndex( groupIndex_ )
{}
VULKAN_HPP_CONSTEXPR BindShaderGroupIndirectCommandNV( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindShaderGroupIndirectCommandNV( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
: BindShaderGroupIndirectCommandNV( *reinterpret_cast<BindShaderGroupIndirectCommandNV const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 BindShaderGroupIndirectCommandNV & operator=( BindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
BindShaderGroupIndirectCommandNV & operator=( VkBindShaderGroupIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV const *>( &rhs );
return *this;
}
BindShaderGroupIndirectCommandNV & setGroupIndex( uint32_t groupIndex_ ) VULKAN_HPP_NOEXCEPT
{
groupIndex = groupIndex_;
return *this;
}
operator VkBindShaderGroupIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkBindShaderGroupIndirectCommandNV*>( this );
}
operator VkBindShaderGroupIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkBindShaderGroupIndirectCommandNV*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( BindShaderGroupIndirectCommandNV const& ) const = default;
#else
bool operator==( BindShaderGroupIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( groupIndex == rhs.groupIndex );
}
bool operator!=( BindShaderGroupIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t groupIndex = {};
};
static_assert( sizeof( BindShaderGroupIndirectCommandNV ) == sizeof( VkBindShaderGroupIndirectCommandNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<BindShaderGroupIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
struct SparseMemoryBind
{
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SparseMemoryBind(VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
: resourceOffset( resourceOffset_ ), size( size_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), flags( flags_ )
{}
VULKAN_HPP_CONSTEXPR SparseMemoryBind( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseMemoryBind( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseMemoryBind( *reinterpret_cast<SparseMemoryBind const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 SparseMemoryBind & operator=( SparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseMemoryBind & operator=( VkSparseMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseMemoryBind const *>( &rhs );
return *this;
}
SparseMemoryBind & setResourceOffset( VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset_ ) VULKAN_HPP_NOEXCEPT
{
resourceOffset = resourceOffset_;
return *this;
}
SparseMemoryBind & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
{
size = size_;
return *this;
}
SparseMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
{
memory = memory_;
return *this;
}
SparseMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
{
memoryOffset = memoryOffset_;
return *this;
}
SparseMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
operator VkSparseMemoryBind const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseMemoryBind*>( this );
}
operator VkSparseMemoryBind &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseMemoryBind*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SparseMemoryBind const& ) const = default;
#else
bool operator==( SparseMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( resourceOffset == rhs.resourceOffset )
&& ( size == rhs.size )
&& ( memory == rhs.memory )
&& ( memoryOffset == rhs.memoryOffset )
&& ( flags == rhs.flags );
}
bool operator!=( SparseMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::DeviceSize resourceOffset = {};
VULKAN_HPP_NAMESPACE::DeviceSize size = {};
VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
};
static_assert( sizeof( SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SparseMemoryBind>::value, "struct wrapper is not a standard layout!" );
struct SparseBufferMemoryBindInfo
{
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {}) VULKAN_HPP_NOEXCEPT
: buffer( buffer_ ), bindCount( bindCount_ ), pBinds( pBinds_ )
{}
VULKAN_HPP_CONSTEXPR SparseBufferMemoryBindInfo( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseBufferMemoryBindInfo( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseBufferMemoryBindInfo( *reinterpret_cast<SparseBufferMemoryBindInfo const *>( &rhs ) )
{}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
SparseBufferMemoryBindInfo( VULKAN_HPP_NAMESPACE::Buffer buffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
: buffer( buffer_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
{}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 SparseBufferMemoryBindInfo & operator=( SparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseBufferMemoryBindInfo & operator=( VkSparseBufferMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo const *>( &rhs );
return *this;
}
SparseBufferMemoryBindInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
SparseBufferMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
{
bindCount = bindCount_;
return *this;
}
SparseBufferMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT
{
pBinds = pBinds_;
return *this;
}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
SparseBufferMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
{
bindCount = static_cast<uint32_t>( binds_.size() );
pBinds = binds_.data();
return *this;
}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
operator VkSparseBufferMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseBufferMemoryBindInfo*>( this );
}
operator VkSparseBufferMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseBufferMemoryBindInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SparseBufferMemoryBindInfo const& ) const = default;
#else
bool operator==( SparseBufferMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( buffer == rhs.buffer )
&& ( bindCount == rhs.bindCount )
&& ( pBinds == rhs.pBinds );
}
bool operator!=( SparseBufferMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::Buffer buffer = {};
uint32_t bindCount = {};
const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds = {};
};
static_assert( sizeof( SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SparseBufferMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
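// Usage sketch (illustrative comment only, enhanced mode): binding a 64 KiB range of a
// sparse buffer on a queue with sparse-binding support; `sparseBuffer`, `memory`,
// `sparseQueue` and `fence` are assumed handles created elsewhere:
//   vk::SparseMemoryBind bind( /*resourceOffset=*/0, /*size=*/65536, memory, /*memoryOffset=*/0 );
//   vk::SparseBufferMemoryBindInfo bufferBind( sparseBuffer, bind );  // ArrayProxy constructor
//   vk::BindSparseInfo bindSparseInfo;
//   bindSparseInfo.setBufferBinds( bufferBind );
//   sparseQueue.bindSparse( bindSparseInfo, fence );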
struct SparseImageOpaqueMemoryBindInfo
{
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ = {}) VULKAN_HPP_NOEXCEPT
: image( image_ ), bindCount( bindCount_ ), pBinds( pBinds_ )
{}
VULKAN_HPP_CONSTEXPR SparseImageOpaqueMemoryBindInfo( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseImageOpaqueMemoryBindInfo( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: SparseImageOpaqueMemoryBindInfo( *reinterpret_cast<SparseImageOpaqueMemoryBindInfo const *>( &rhs ) )
{}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
SparseImageOpaqueMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ )
: image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
{}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 SparseImageOpaqueMemoryBindInfo & operator=( SparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
SparseImageOpaqueMemoryBindInfo & operator=( VkSparseImageOpaqueMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo const *>( &rhs );
return *this;
}
SparseImageOpaqueMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
{
image = image_;
return *this;
}
SparseImageOpaqueMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
{
bindCount = bindCount_;
return *this;
}
SparseImageOpaqueMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT
{
pBinds = pBinds_;
return *this;
}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
SparseImageOpaqueMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
{
bindCount = static_cast<uint32_t>( binds_.size() );
pBinds = binds_.data();
return *this;
}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
operator VkSparseImageOpaqueMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkSparseImageOpaqueMemoryBindInfo*>( this );
}
operator VkSparseImageOpaqueMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkSparseImageOpaqueMemoryBindInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( SparseImageOpaqueMemoryBindInfo const& ) const = default;
#else
bool operator==( SparseImageOpaqueMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( image == rhs.image )
&& ( bindCount == rhs.bindCount )
&& ( pBinds == rhs.pBinds );
}
bool operator!=( SparseImageOpaqueMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::Image image = {};
uint32_t bindCount = {};
const VULKAN_HPP_NAMESPACE::SparseMemoryBind* pBinds = {};
};
static_assert( sizeof( SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<SparseImageOpaqueMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
struct ImageSubresource
{
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageSubresource(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t arrayLayer_ = {}) VULKAN_HPP_NOEXCEPT
: aspectMask( aspectMask_ ), mipLevel( mipLevel_ ), arrayLayer( arrayLayer_ )
{}
VULKAN_HPP_CONSTEXPR ImageSubresource( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageSubresource( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageSubresource( *reinterpret_cast<ImageSubresource const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ImageSubresource & operator=( ImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageSubresource & operator=( VkImageSubresource const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresource const *>( &rhs );
return *this;
}
ImageSubresource & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
}
ImageSubresource & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
{
mipLevel = mipLevel_;
return *this;
}
ImageSubresource & setArrayLayer( uint32_t arrayLayer_ ) VULKAN_HPP_NOEXCEPT
{
arrayLayer = arrayLayer_;
return *this;
}
operator VkImageSubresource const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageSubresource*>( this );
}
operator VkImageSubresource &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageSubresource*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageSubresource const& ) const = default;
#else
bool operator==( ImageSubresource const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( aspectMask == rhs.aspectMask )
&& ( mipLevel == rhs.mipLevel )
&& ( arrayLayer == rhs.arrayLayer );
}
bool operator!=( ImageSubresource const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
uint32_t mipLevel = {};
uint32_t arrayLayer = {};
};
static_assert( sizeof( ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageSubresource>::value, "struct wrapper is not a standard layout!" );
struct Offset3D
{
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR Offset3D(int32_t x_ = {}, int32_t y_ = {}, int32_t z_ = {}) VULKAN_HPP_NOEXCEPT
: x( x_ ), y( y_ ), z( z_ )
{}
VULKAN_HPP_CONSTEXPR Offset3D( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Offset3D( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
: Offset3D( *reinterpret_cast<Offset3D const *>( &rhs ) )
{}
explicit Offset3D( Offset2D const& offset2D, int32_t z_ = {} )
: x( offset2D.x )
, y( offset2D.y )
, z( z_ )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 Offset3D & operator=( Offset3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
Offset3D & operator=( VkOffset3D const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Offset3D const *>( &rhs );
return *this;
}
Offset3D & setX( int32_t x_ ) VULKAN_HPP_NOEXCEPT
{
x = x_;
return *this;
}
Offset3D & setY( int32_t y_ ) VULKAN_HPP_NOEXCEPT
{
y = y_;
return *this;
}
Offset3D & setZ( int32_t z_ ) VULKAN_HPP_NOEXCEPT
{
z = z_;
return *this;
}
operator VkOffset3D const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkOffset3D*>( this );
}
operator VkOffset3D &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkOffset3D*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( Offset3D const& ) const = default;
#else
bool operator==( Offset3D const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( x == rhs.x )
&& ( y == rhs.y )
&& ( z == rhs.z );
}
bool operator!=( Offset3D const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
int32_t x = {};
int32_t y = {};
int32_t z = {};
};
static_assert( sizeof( Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<Offset3D>::value, "struct wrapper is not a standard layout!" );
  17726. struct Extent3D
  17727. {
  17728. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  17729. VULKAN_HPP_CONSTEXPR Extent3D(uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {}) VULKAN_HPP_NOEXCEPT
  17730. : width( width_ ), height( height_ ), depth( depth_ )
  17731. {}
  17732. VULKAN_HPP_CONSTEXPR Extent3D( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  17733. Extent3D( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
  17734. : Extent3D( *reinterpret_cast<Extent3D const *>( &rhs ) )
  17735. {}
  17736. explicit Extent3D( Extent2D const& extent2D, uint32_t depth_ = {} )
  17737. : width( extent2D.width )
  17738. , height( extent2D.height )
  17739. , depth( depth_ )
  17740. {}
  17741. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  17742. VULKAN_HPP_CONSTEXPR_14 Extent3D & operator=( Extent3D const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  17743. Extent3D & operator=( VkExtent3D const & rhs ) VULKAN_HPP_NOEXCEPT
  17744. {
  17745. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Extent3D const *>( &rhs );
  17746. return *this;
  17747. }
  17748. Extent3D & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
  17749. {
  17750. width = width_;
  17751. return *this;
  17752. }
  17753. Extent3D & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
  17754. {
  17755. height = height_;
  17756. return *this;
  17757. }
  17758. Extent3D & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
  17759. {
  17760. depth = depth_;
  17761. return *this;
  17762. }
  17763. operator VkExtent3D const&() const VULKAN_HPP_NOEXCEPT
  17764. {
  17765. return *reinterpret_cast<const VkExtent3D*>( this );
  17766. }
  17767. operator VkExtent3D &() VULKAN_HPP_NOEXCEPT
  17768. {
  17769. return *reinterpret_cast<VkExtent3D*>( this );
  17770. }
  17771. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  17772. auto operator<=>( Extent3D const& ) const = default;
  17773. #else
  17774. bool operator==( Extent3D const& rhs ) const VULKAN_HPP_NOEXCEPT
  17775. {
  17776. return ( width == rhs.width )
  17777. && ( height == rhs.height )
  17778. && ( depth == rhs.depth );
  17779. }
  17780. bool operator!=( Extent3D const& rhs ) const VULKAN_HPP_NOEXCEPT
  17781. {
  17782. return !operator==( rhs );
  17783. }
  17784. #endif
  17785. public:
  17786. uint32_t width = {};
  17787. uint32_t height = {};
  17788. uint32_t depth = {};
  17789. };
  17790. static_assert( sizeof( Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
  17791. static_assert( std::is_standard_layout<Extent3D>::value, "struct wrapper is not a standard layout!" );
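// Illustrative usage sketch (not part of the generated header): the 2D-to-3D promoting
// constructors above make it easy to extend an Offset2D/Extent2D (e.g. a swapchain size)
// into the 3D variants most commands expect. Variable names below are hypothetical.
//
//   VULKAN_HPP_NAMESPACE::Extent2D surfaceExtent( 1920, 1080 );
//   VULKAN_HPP_NAMESPACE::Extent3D imageExtent( surfaceExtent, 1 );                        // depth = 1
//   VULKAN_HPP_NAMESPACE::Offset3D origin( VULKAN_HPP_NAMESPACE::Offset2D( 0, 0 ), 0 );    // z = 0
//   // Both wrappers convert implicitly to their C counterparts:
//   VkExtent3D cExtent = imageExtent;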
  17792. struct SparseImageMemoryBind
  17793. {
  17794. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  17795. VULKAN_HPP_CONSTEXPR SparseImageMemoryBind(VULKAN_HPP_NAMESPACE::ImageSubresource subresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ = {}, VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
  17796. : subresource( subresource_ ), offset( offset_ ), extent( extent_ ), memory( memory_ ), memoryOffset( memoryOffset_ ), flags( flags_ )
  17797. {}
  17798. VULKAN_HPP_CONSTEXPR SparseImageMemoryBind( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  17799. SparseImageMemoryBind( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
  17800. : SparseImageMemoryBind( *reinterpret_cast<SparseImageMemoryBind const *>( &rhs ) )
  17801. {}
  17802. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  17803. VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBind & operator=( SparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  17804. SparseImageMemoryBind & operator=( VkSparseImageMemoryBind const & rhs ) VULKAN_HPP_NOEXCEPT
  17805. {
  17806. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind const *>( &rhs );
  17807. return *this;
  17808. }
  17809. SparseImageMemoryBind & setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource const & subresource_ ) VULKAN_HPP_NOEXCEPT
  17810. {
  17811. subresource = subresource_;
  17812. return *this;
  17813. }
  17814. SparseImageMemoryBind & setOffset( VULKAN_HPP_NAMESPACE::Offset3D const & offset_ ) VULKAN_HPP_NOEXCEPT
  17815. {
  17816. offset = offset_;
  17817. return *this;
  17818. }
  17819. SparseImageMemoryBind & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
  17820. {
  17821. extent = extent_;
  17822. return *this;
  17823. }
  17824. SparseImageMemoryBind & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
  17825. {
  17826. memory = memory_;
  17827. return *this;
  17828. }
  17829. SparseImageMemoryBind & setMemoryOffset( VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset_ ) VULKAN_HPP_NOEXCEPT
  17830. {
  17831. memoryOffset = memoryOffset_;
  17832. return *this;
  17833. }
  17834. SparseImageMemoryBind & setFlags( VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags_ ) VULKAN_HPP_NOEXCEPT
  17835. {
  17836. flags = flags_;
  17837. return *this;
  17838. }
  17839. operator VkSparseImageMemoryBind const&() const VULKAN_HPP_NOEXCEPT
  17840. {
  17841. return *reinterpret_cast<const VkSparseImageMemoryBind*>( this );
  17842. }
  17843. operator VkSparseImageMemoryBind &() VULKAN_HPP_NOEXCEPT
  17844. {
  17845. return *reinterpret_cast<VkSparseImageMemoryBind*>( this );
  17846. }
  17847. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  17848. auto operator<=>( SparseImageMemoryBind const& ) const = default;
  17849. #else
  17850. bool operator==( SparseImageMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT
  17851. {
  17852. return ( subresource == rhs.subresource )
  17853. && ( offset == rhs.offset )
  17854. && ( extent == rhs.extent )
  17855. && ( memory == rhs.memory )
  17856. && ( memoryOffset == rhs.memoryOffset )
  17857. && ( flags == rhs.flags );
  17858. }
  17859. bool operator!=( SparseImageMemoryBind const& rhs ) const VULKAN_HPP_NOEXCEPT
  17860. {
  17861. return !operator==( rhs );
  17862. }
  17863. #endif
  17864. public:
  17865. VULKAN_HPP_NAMESPACE::ImageSubresource subresource = {};
  17866. VULKAN_HPP_NAMESPACE::Offset3D offset = {};
  17867. VULKAN_HPP_NAMESPACE::Extent3D extent = {};
  17868. VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
  17869. VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset = {};
  17870. VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags flags = {};
  17871. };
  17872. static_assert( sizeof( SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ), "struct and wrapper have different size!" );
  17873. static_assert( std::is_standard_layout<SparseImageMemoryBind>::value, "struct wrapper is not a standard layout!" );
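// Illustrative sketch (not part of the generated header): every setter returns *this, so a
// SparseImageMemoryBind can be filled with a single fluent chain. All names are hypothetical;
// "memory" and "tileExtent" stand for objects created elsewhere.
//
//   VULKAN_HPP_NAMESPACE::SparseImageMemoryBind bind = VULKAN_HPP_NAMESPACE::SparseImageMemoryBind()
//     .setSubresource( VULKAN_HPP_NAMESPACE::ImageSubresource( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0 ) )
//     .setOffset( VULKAN_HPP_NAMESPACE::Offset3D( 0, 0, 0 ) )
//     .setExtent( tileExtent )
//     .setMemory( memory )
//     .setMemoryOffset( 0 );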
  17874. struct SparseImageMemoryBindInfo
  17875. {
  17876. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  17877. VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, uint32_t bindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ = {}) VULKAN_HPP_NOEXCEPT
  17878. : image( image_ ), bindCount( bindCount_ ), pBinds( pBinds_ )
  17879. {}
  17880. VULKAN_HPP_CONSTEXPR SparseImageMemoryBindInfo( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  17881. SparseImageMemoryBindInfo( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  17882. : SparseImageMemoryBindInfo( *reinterpret_cast<SparseImageMemoryBindInfo const *>( &rhs ) )
  17883. {}
  17884. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  17885. SparseImageMemoryBindInfo( VULKAN_HPP_NAMESPACE::Image image_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind> const & binds_ )
  17886. : image( image_ ), bindCount( static_cast<uint32_t>( binds_.size() ) ), pBinds( binds_.data() )
  17887. {}
  17888. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  17889. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  17890. VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryBindInfo & operator=( SparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  17891. SparseImageMemoryBindInfo & operator=( VkSparseImageMemoryBindInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  17892. {
  17893. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo const *>( &rhs );
  17894. return *this;
  17895. }
  17896. SparseImageMemoryBindInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
  17897. {
  17898. image = image_;
  17899. return *this;
  17900. }
  17901. SparseImageMemoryBindInfo & setBindCount( uint32_t bindCount_ ) VULKAN_HPP_NOEXCEPT
  17902. {
  17903. bindCount = bindCount_;
  17904. return *this;
  17905. }
  17906. SparseImageMemoryBindInfo & setPBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds_ ) VULKAN_HPP_NOEXCEPT
  17907. {
  17908. pBinds = pBinds_;
  17909. return *this;
  17910. }
  17911. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  17912. SparseImageMemoryBindInfo & setBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind> const & binds_ ) VULKAN_HPP_NOEXCEPT
  17913. {
  17914. bindCount = static_cast<uint32_t>( binds_.size() );
  17915. pBinds = binds_.data();
  17916. return *this;
  17917. }
  17918. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  17919. operator VkSparseImageMemoryBindInfo const&() const VULKAN_HPP_NOEXCEPT
  17920. {
  17921. return *reinterpret_cast<const VkSparseImageMemoryBindInfo*>( this );
  17922. }
  17923. operator VkSparseImageMemoryBindInfo &() VULKAN_HPP_NOEXCEPT
  17924. {
  17925. return *reinterpret_cast<VkSparseImageMemoryBindInfo*>( this );
  17926. }
  17927. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  17928. auto operator<=>( SparseImageMemoryBindInfo const& ) const = default;
  17929. #else
  17930. bool operator==( SparseImageMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  17931. {
  17932. return ( image == rhs.image )
  17933. && ( bindCount == rhs.bindCount )
  17934. && ( pBinds == rhs.pBinds );
  17935. }
  17936. bool operator!=( SparseImageMemoryBindInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  17937. {
  17938. return !operator==( rhs );
  17939. }
  17940. #endif
  17941. public:
  17942. VULKAN_HPP_NAMESPACE::Image image = {};
  17943. uint32_t bindCount = {};
  17944. const VULKAN_HPP_NAMESPACE::SparseImageMemoryBind* pBinds = {};
  17945. };
  17946. static_assert( sizeof( SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ), "struct and wrapper have different size!" );
  17947. static_assert( std::is_standard_layout<SparseImageMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
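// Illustrative sketch (not part of the generated header): the enhanced-mode constructor above
// derives bindCount/pBinds from an ArrayProxyNoTemporaries, so the container must be a named
// lvalue that outlives the struct. Variable names are hypothetical.
//
//   std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind> binds = { bind };
//   VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo bindInfo( image, binds );   // OK: named vector
//   // VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo bad( image, { bind } );  // rejected: temporary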
  17948. struct BindSparseInfo
  17949. {
  17950. static const bool allowDuplicate = false;
  17951. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBindSparseInfo;
  17952. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  17953. VULKAN_HPP_CONSTEXPR BindSparseInfo(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, uint32_t bufferBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ = {}, uint32_t imageOpaqueBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ = {}, uint32_t imageBindCount_ = {}, const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ = {}, uint32_t signalSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {}) VULKAN_HPP_NOEXCEPT
  17954. : waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphores( pWaitSemaphores_ ), bufferBindCount( bufferBindCount_ ), pBufferBinds( pBufferBinds_ ), imageOpaqueBindCount( imageOpaqueBindCount_ ), pImageOpaqueBinds( pImageOpaqueBinds_ ), imageBindCount( imageBindCount_ ), pImageBinds( pImageBinds_ ), signalSemaphoreCount( signalSemaphoreCount_ ), pSignalSemaphores( pSignalSemaphores_ )
  17955. {}
  17956. VULKAN_HPP_CONSTEXPR BindSparseInfo( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  17957. BindSparseInfo( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  17958. : BindSparseInfo( *reinterpret_cast<BindSparseInfo const *>( &rhs ) )
  17959. {}
  17960. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  17961. BindSparseInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo> const & bufferBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo> const & imageOpaqueBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> const & imageBinds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ = {} )
  17962. : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), bufferBindCount( static_cast<uint32_t>( bufferBinds_.size() ) ), pBufferBinds( bufferBinds_.data() ), imageOpaqueBindCount( static_cast<uint32_t>( imageOpaqueBinds_.size() ) ), pImageOpaqueBinds( imageOpaqueBinds_.data() ), imageBindCount( static_cast<uint32_t>( imageBinds_.size() ) ), pImageBinds( imageBinds_.data() ), signalSemaphoreCount( static_cast<uint32_t>( signalSemaphores_.size() ) ), pSignalSemaphores( signalSemaphores_.data() )
  17963. {}
  17964. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  17965. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  17966. VULKAN_HPP_CONSTEXPR_14 BindSparseInfo & operator=( BindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  17967. BindSparseInfo & operator=( VkBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  17968. {
  17969. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindSparseInfo const *>( &rhs );
  17970. return *this;
  17971. }
  17972. BindSparseInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  17973. {
  17974. pNext = pNext_;
  17975. return *this;
  17976. }
  17977. BindSparseInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
  17978. {
  17979. waitSemaphoreCount = waitSemaphoreCount_;
  17980. return *this;
  17981. }
  17982. BindSparseInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
  17983. {
  17984. pWaitSemaphores = pWaitSemaphores_;
  17985. return *this;
  17986. }
  17987. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  17988. BindSparseInfo & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
  17989. {
  17990. waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
  17991. pWaitSemaphores = waitSemaphores_.data();
  17992. return *this;
  17993. }
  17994. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  17995. BindSparseInfo & setBufferBindCount( uint32_t bufferBindCount_ ) VULKAN_HPP_NOEXCEPT
  17996. {
  17997. bufferBindCount = bufferBindCount_;
  17998. return *this;
  17999. }
  18000. BindSparseInfo & setPBufferBinds( const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds_ ) VULKAN_HPP_NOEXCEPT
  18001. {
  18002. pBufferBinds = pBufferBinds_;
  18003. return *this;
  18004. }
  18005. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  18006. BindSparseInfo & setBufferBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo> const & bufferBinds_ ) VULKAN_HPP_NOEXCEPT
  18007. {
  18008. bufferBindCount = static_cast<uint32_t>( bufferBinds_.size() );
  18009. pBufferBinds = bufferBinds_.data();
  18010. return *this;
  18011. }
  18012. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  18013. BindSparseInfo & setImageOpaqueBindCount( uint32_t imageOpaqueBindCount_ ) VULKAN_HPP_NOEXCEPT
  18014. {
  18015. imageOpaqueBindCount = imageOpaqueBindCount_;
  18016. return *this;
  18017. }
  18018. BindSparseInfo & setPImageOpaqueBinds( const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
  18019. {
  18020. pImageOpaqueBinds = pImageOpaqueBinds_;
  18021. return *this;
  18022. }
  18023. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  18024. BindSparseInfo & setImageOpaqueBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo> const & imageOpaqueBinds_ ) VULKAN_HPP_NOEXCEPT
  18025. {
  18026. imageOpaqueBindCount = static_cast<uint32_t>( imageOpaqueBinds_.size() );
  18027. pImageOpaqueBinds = imageOpaqueBinds_.data();
  18028. return *this;
  18029. }
  18030. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  18031. BindSparseInfo & setImageBindCount( uint32_t imageBindCount_ ) VULKAN_HPP_NOEXCEPT
  18032. {
  18033. imageBindCount = imageBindCount_;
  18034. return *this;
  18035. }
  18036. BindSparseInfo & setPImageBinds( const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds_ ) VULKAN_HPP_NOEXCEPT
  18037. {
  18038. pImageBinds = pImageBinds_;
  18039. return *this;
  18040. }
  18041. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  18042. BindSparseInfo & setImageBinds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> const & imageBinds_ ) VULKAN_HPP_NOEXCEPT
  18043. {
  18044. imageBindCount = static_cast<uint32_t>( imageBinds_.size() );
  18045. pImageBinds = imageBinds_.data();
  18046. return *this;
  18047. }
  18048. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  18049. BindSparseInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
  18050. {
  18051. signalSemaphoreCount = signalSemaphoreCount_;
  18052. return *this;
  18053. }
  18054. BindSparseInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
  18055. {
  18056. pSignalSemaphores = pSignalSemaphores_;
  18057. return *this;
  18058. }
  18059. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  18060. BindSparseInfo & setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT
  18061. {
  18062. signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
  18063. pSignalSemaphores = signalSemaphores_.data();
  18064. return *this;
  18065. }
  18066. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  18067. operator VkBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT
  18068. {
  18069. return *reinterpret_cast<const VkBindSparseInfo*>( this );
  18070. }
  18071. operator VkBindSparseInfo &() VULKAN_HPP_NOEXCEPT
  18072. {
  18073. return *reinterpret_cast<VkBindSparseInfo*>( this );
  18074. }
  18075. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  18076. auto operator<=>( BindSparseInfo const& ) const = default;
  18077. #else
  18078. bool operator==( BindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  18079. {
  18080. return ( sType == rhs.sType )
  18081. && ( pNext == rhs.pNext )
  18082. && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
  18083. && ( pWaitSemaphores == rhs.pWaitSemaphores )
  18084. && ( bufferBindCount == rhs.bufferBindCount )
  18085. && ( pBufferBinds == rhs.pBufferBinds )
  18086. && ( imageOpaqueBindCount == rhs.imageOpaqueBindCount )
  18087. && ( pImageOpaqueBinds == rhs.pImageOpaqueBinds )
  18088. && ( imageBindCount == rhs.imageBindCount )
  18089. && ( pImageBinds == rhs.pImageBinds )
  18090. && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
  18091. && ( pSignalSemaphores == rhs.pSignalSemaphores );
  18092. }
  18093. bool operator!=( BindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  18094. {
  18095. return !operator==( rhs );
  18096. }
  18097. #endif
  18098. public:
  18099. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBindSparseInfo;
  18100. const void* pNext = {};
  18101. uint32_t waitSemaphoreCount = {};
  18102. const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {};
  18103. uint32_t bufferBindCount = {};
  18104. const VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo* pBufferBinds = {};
  18105. uint32_t imageOpaqueBindCount = {};
  18106. const VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds = {};
  18107. uint32_t imageBindCount = {};
  18108. const VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo* pImageBinds = {};
  18109. uint32_t signalSemaphoreCount = {};
  18110. const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores = {};
  18111. };
  18112. static_assert( sizeof( BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" );
  18113. static_assert( std::is_standard_layout<BindSparseInfo>::value, "struct wrapper is not a standard layout!" );
  18114. template <>
  18115. struct CppType<StructureType, StructureType::eBindSparseInfo>
  18116. {
  18117. using Type = BindSparseInfo;
  18118. };
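// Illustrative sketch (not part of the generated header): in enhanced mode the array setters keep
// the count and pointer members in sync, so a BindSparseInfo can be built from named containers
// and handed to a sparse-binding capable queue. All names are hypothetical, and the bindSparse
// call refers to the Queue wrapper defined elsewhere in this header.
//
//   std::vector<VULKAN_HPP_NAMESPACE::Semaphore>                 waitSemaphores   = { acquireSemaphore };
//   std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo> imageBinds       = { bindInfo };
//   std::vector<VULKAN_HPP_NAMESPACE::Semaphore>                 signalSemaphores = { bindDoneSemaphore };
//   VULKAN_HPP_NAMESPACE::BindSparseInfo bindSparseInfo;
//   bindSparseInfo.setWaitSemaphores( waitSemaphores )
//                 .setImageBinds( imageBinds )
//                 .setSignalSemaphores( signalSemaphores );
//   // sparseQueue.bindSparse( bindSparseInfo, fence );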
  18119. struct BindVertexBufferIndirectCommandNV
  18120. {
  18121. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18122. VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV(VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ = {}, uint32_t size_ = {}, uint32_t stride_ = {}) VULKAN_HPP_NOEXCEPT
  18123. : bufferAddress( bufferAddress_ ), size( size_ ), stride( stride_ )
  18124. {}
  18125. VULKAN_HPP_CONSTEXPR BindVertexBufferIndirectCommandNV( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18126. BindVertexBufferIndirectCommandNV( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
  18127. : BindVertexBufferIndirectCommandNV( *reinterpret_cast<BindVertexBufferIndirectCommandNV const *>( &rhs ) )
  18128. {}
  18129. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18130. VULKAN_HPP_CONSTEXPR_14 BindVertexBufferIndirectCommandNV & operator=( BindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18131. BindVertexBufferIndirectCommandNV & operator=( VkBindVertexBufferIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
  18132. {
  18133. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV const *>( &rhs );
  18134. return *this;
  18135. }
  18136. BindVertexBufferIndirectCommandNV & setBufferAddress( VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress_ ) VULKAN_HPP_NOEXCEPT
  18137. {
  18138. bufferAddress = bufferAddress_;
  18139. return *this;
  18140. }
  18141. BindVertexBufferIndirectCommandNV & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
  18142. {
  18143. size = size_;
  18144. return *this;
  18145. }
  18146. BindVertexBufferIndirectCommandNV & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
  18147. {
  18148. stride = stride_;
  18149. return *this;
  18150. }
  18151. operator VkBindVertexBufferIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
  18152. {
  18153. return *reinterpret_cast<const VkBindVertexBufferIndirectCommandNV*>( this );
  18154. }
  18155. operator VkBindVertexBufferIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
  18156. {
  18157. return *reinterpret_cast<VkBindVertexBufferIndirectCommandNV*>( this );
  18158. }
  18159. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  18160. auto operator<=>( BindVertexBufferIndirectCommandNV const& ) const = default;
  18161. #else
  18162. bool operator==( BindVertexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  18163. {
  18164. return ( bufferAddress == rhs.bufferAddress )
  18165. && ( size == rhs.size )
  18166. && ( stride == rhs.stride );
  18167. }
  18168. bool operator!=( BindVertexBufferIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  18169. {
  18170. return !operator==( rhs );
  18171. }
  18172. #endif
  18173. public:
  18174. VULKAN_HPP_NAMESPACE::DeviceAddress bufferAddress = {};
  18175. uint32_t size = {};
  18176. uint32_t stride = {};
  18177. };
  18178. static_assert( sizeof( BindVertexBufferIndirectCommandNV ) == sizeof( VkBindVertexBufferIndirectCommandNV ), "struct and wrapper have different size!" );
  18179. static_assert( std::is_standard_layout<BindVertexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
  18180. struct ImageSubresourceLayers
  18181. {
  18182. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18183. VULKAN_HPP_CONSTEXPR ImageSubresourceLayers(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t mipLevel_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
  18184. : aspectMask( aspectMask_ ), mipLevel( mipLevel_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ )
  18185. {}
  18186. VULKAN_HPP_CONSTEXPR ImageSubresourceLayers( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18187. ImageSubresourceLayers( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
  18188. : ImageSubresourceLayers( *reinterpret_cast<ImageSubresourceLayers const *>( &rhs ) )
  18189. {}
  18190. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18191. VULKAN_HPP_CONSTEXPR_14 ImageSubresourceLayers & operator=( ImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18192. ImageSubresourceLayers & operator=( VkImageSubresourceLayers const & rhs ) VULKAN_HPP_NOEXCEPT
  18193. {
  18194. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const *>( &rhs );
  18195. return *this;
  18196. }
  18197. ImageSubresourceLayers & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
  18198. {
  18199. aspectMask = aspectMask_;
  18200. return *this;
  18201. }
  18202. ImageSubresourceLayers & setMipLevel( uint32_t mipLevel_ ) VULKAN_HPP_NOEXCEPT
  18203. {
  18204. mipLevel = mipLevel_;
  18205. return *this;
  18206. }
  18207. ImageSubresourceLayers & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
  18208. {
  18209. baseArrayLayer = baseArrayLayer_;
  18210. return *this;
  18211. }
  18212. ImageSubresourceLayers & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
  18213. {
  18214. layerCount = layerCount_;
  18215. return *this;
  18216. }
  18217. operator VkImageSubresourceLayers const&() const VULKAN_HPP_NOEXCEPT
  18218. {
  18219. return *reinterpret_cast<const VkImageSubresourceLayers*>( this );
  18220. }
  18221. operator VkImageSubresourceLayers &() VULKAN_HPP_NOEXCEPT
  18222. {
  18223. return *reinterpret_cast<VkImageSubresourceLayers*>( this );
  18224. }
  18225. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  18226. auto operator<=>( ImageSubresourceLayers const& ) const = default;
  18227. #else
  18228. bool operator==( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT
  18229. {
  18230. return ( aspectMask == rhs.aspectMask )
  18231. && ( mipLevel == rhs.mipLevel )
  18232. && ( baseArrayLayer == rhs.baseArrayLayer )
  18233. && ( layerCount == rhs.layerCount );
  18234. }
  18235. bool operator!=( ImageSubresourceLayers const& rhs ) const VULKAN_HPP_NOEXCEPT
  18236. {
  18237. return !operator==( rhs );
  18238. }
  18239. #endif
  18240. public:
  18241. VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
  18242. uint32_t mipLevel = {};
  18243. uint32_t baseArrayLayer = {};
  18244. uint32_t layerCount = {};
  18245. };
  18246. static_assert( sizeof( ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ), "struct and wrapper have different size!" );
  18247. static_assert( std::is_standard_layout<ImageSubresourceLayers>::value, "struct wrapper is not a standard layout!" );
  18248. struct ImageBlit2KHR
  18249. {
  18250. static const bool allowDuplicate = false;
  18251. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageBlit2KHR;
  18252. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18253. VULKAN_HPP_CONSTEXPR_14 ImageBlit2KHR(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& dstOffsets_ = {}) VULKAN_HPP_NOEXCEPT
  18254. : srcSubresource( srcSubresource_ ), srcOffsets( srcOffsets_ ), dstSubresource( dstSubresource_ ), dstOffsets( dstOffsets_ )
  18255. {}
  18256. VULKAN_HPP_CONSTEXPR_14 ImageBlit2KHR( ImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18257. ImageBlit2KHR( VkImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  18258. : ImageBlit2KHR( *reinterpret_cast<ImageBlit2KHR const *>( &rhs ) )
  18259. {}
  18260. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18261. VULKAN_HPP_CONSTEXPR_14 ImageBlit2KHR & operator=( ImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18262. ImageBlit2KHR & operator=( VkImageBlit2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  18263. {
  18264. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit2KHR const *>( &rhs );
  18265. return *this;
  18266. }
  18267. ImageBlit2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  18268. {
  18269. pNext = pNext_;
  18270. return *this;
  18271. }
  18272. ImageBlit2KHR & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
  18273. {
  18274. srcSubresource = srcSubresource_;
  18275. return *this;
  18276. }
  18277. ImageBlit2KHR & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
  18278. {
  18279. srcOffsets = srcOffsets_;
  18280. return *this;
  18281. }
  18282. ImageBlit2KHR & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
  18283. {
  18284. dstSubresource = dstSubresource_;
  18285. return *this;
  18286. }
  18287. ImageBlit2KHR & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
  18288. {
  18289. dstOffsets = dstOffsets_;
  18290. return *this;
  18291. }
  18292. operator VkImageBlit2KHR const&() const VULKAN_HPP_NOEXCEPT
  18293. {
  18294. return *reinterpret_cast<const VkImageBlit2KHR*>( this );
  18295. }
  18296. operator VkImageBlit2KHR &() VULKAN_HPP_NOEXCEPT
  18297. {
  18298. return *reinterpret_cast<VkImageBlit2KHR*>( this );
  18299. }
  18300. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  18301. auto operator<=>( ImageBlit2KHR const& ) const = default;
  18302. #else
  18303. bool operator==( ImageBlit2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  18304. {
  18305. return ( sType == rhs.sType )
  18306. && ( pNext == rhs.pNext )
  18307. && ( srcSubresource == rhs.srcSubresource )
  18308. && ( srcOffsets == rhs.srcOffsets )
  18309. && ( dstSubresource == rhs.dstSubresource )
  18310. && ( dstOffsets == rhs.dstOffsets );
  18311. }
  18312. bool operator!=( ImageBlit2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  18313. {
  18314. return !operator==( rhs );
  18315. }
  18316. #endif
  18317. public:
  18318. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageBlit2KHR;
  18319. const void* pNext = {};
  18320. VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
  18321. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = {};
  18322. VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
  18323. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
  18324. };
  18325. static_assert( sizeof( ImageBlit2KHR ) == sizeof( VkImageBlit2KHR ), "struct and wrapper have different size!" );
  18326. static_assert( std::is_standard_layout<ImageBlit2KHR>::value, "struct wrapper is not a standard layout!" );
  18327. template <>
  18328. struct CppType<StructureType, StructureType::eImageBlit2KHR>
  18329. {
  18330. using Type = ImageBlit2KHR;
  18331. };
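// Illustrative sketch (not part of the generated header): srcOffsets/dstOffsets are wrapped
// std::array<Offset3D, 2> bounds holding the two opposite corners of the blit region. Names
// below are hypothetical; srcExtent/dstExtent stand for the source and destination image sizes.
//
//   VULKAN_HPP_NAMESPACE::ImageBlit2KHR region;
//   region.setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0, 1 ) )
//         .setSrcOffsets( { VULKAN_HPP_NAMESPACE::Offset3D( 0, 0, 0 ),
//                           VULKAN_HPP_NAMESPACE::Offset3D( static_cast<int32_t>( srcExtent.width ), static_cast<int32_t>( srcExtent.height ), 1 ) } )
//         .setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor, 0, 0, 1 ) )
//         .setDstOffsets( { VULKAN_HPP_NAMESPACE::Offset3D( 0, 0, 0 ),
//                           VULKAN_HPP_NAMESPACE::Offset3D( static_cast<int32_t>( dstExtent.width ), static_cast<int32_t>( dstExtent.height ), 1 ) } );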
  18332. struct BlitImageInfo2KHR
  18333. {
  18334. static const bool allowDuplicate = false;
  18335. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBlitImageInfo2KHR;
  18336. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18337. VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageBlit2KHR* pRegions_ = {}, VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest) VULKAN_HPP_NOEXCEPT
  18338. : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ ), filter( filter_ )
  18339. {}
  18340. VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2KHR( BlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18341. BlitImageInfo2KHR( VkBlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  18342. : BlitImageInfo2KHR( *reinterpret_cast<BlitImageInfo2KHR const *>( &rhs ) )
  18343. {}
  18344. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  18345. BlitImageInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2KHR> const & regions_, VULKAN_HPP_NAMESPACE::Filter filter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest )
  18346. : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() ), filter( filter_ )
  18347. {}
  18348. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  18349. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18350. VULKAN_HPP_CONSTEXPR_14 BlitImageInfo2KHR & operator=( BlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18351. BlitImageInfo2KHR & operator=( VkBlitImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  18352. {
  18353. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR const *>( &rhs );
  18354. return *this;
  18355. }
  18356. BlitImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  18357. {
  18358. pNext = pNext_;
  18359. return *this;
  18360. }
  18361. BlitImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
  18362. {
  18363. srcImage = srcImage_;
  18364. return *this;
  18365. }
  18366. BlitImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
  18367. {
  18368. srcImageLayout = srcImageLayout_;
  18369. return *this;
  18370. }
  18371. BlitImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
  18372. {
  18373. dstImage = dstImage_;
  18374. return *this;
  18375. }
  18376. BlitImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
  18377. {
  18378. dstImageLayout = dstImageLayout_;
  18379. return *this;
  18380. }
  18381. BlitImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
  18382. {
  18383. regionCount = regionCount_;
  18384. return *this;
  18385. }
  18386. BlitImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageBlit2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
  18387. {
  18388. pRegions = pRegions_;
  18389. return *this;
  18390. }
  18391. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  18392. BlitImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageBlit2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
  18393. {
  18394. regionCount = static_cast<uint32_t>( regions_.size() );
  18395. pRegions = regions_.data();
  18396. return *this;
  18397. }
  18398. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  18399. BlitImageInfo2KHR & setFilter( VULKAN_HPP_NAMESPACE::Filter filter_ ) VULKAN_HPP_NOEXCEPT
  18400. {
  18401. filter = filter_;
  18402. return *this;
  18403. }
  18404. operator VkBlitImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
  18405. {
  18406. return *reinterpret_cast<const VkBlitImageInfo2KHR*>( this );
  18407. }
  18408. operator VkBlitImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
  18409. {
  18410. return *reinterpret_cast<VkBlitImageInfo2KHR*>( this );
  18411. }
  18412. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  18413. auto operator<=>( BlitImageInfo2KHR const& ) const = default;
  18414. #else
  18415. bool operator==( BlitImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  18416. {
  18417. return ( sType == rhs.sType )
  18418. && ( pNext == rhs.pNext )
  18419. && ( srcImage == rhs.srcImage )
  18420. && ( srcImageLayout == rhs.srcImageLayout )
  18421. && ( dstImage == rhs.dstImage )
  18422. && ( dstImageLayout == rhs.dstImageLayout )
  18423. && ( regionCount == rhs.regionCount )
  18424. && ( pRegions == rhs.pRegions )
  18425. && ( filter == rhs.filter );
  18426. }
  18427. bool operator!=( BlitImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  18428. {
  18429. return !operator==( rhs );
  18430. }
  18431. #endif
  18432. public:
  18433. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBlitImageInfo2KHR;
  18434. const void* pNext = {};
  18435. VULKAN_HPP_NAMESPACE::Image srcImage = {};
  18436. VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  18437. VULKAN_HPP_NAMESPACE::Image dstImage = {};
  18438. VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  18439. uint32_t regionCount = {};
  18440. const VULKAN_HPP_NAMESPACE::ImageBlit2KHR* pRegions = {};
  18441. VULKAN_HPP_NAMESPACE::Filter filter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
  18442. };
  18443. static_assert( sizeof( BlitImageInfo2KHR ) == sizeof( VkBlitImageInfo2KHR ), "struct and wrapper have different size!" );
  18444. static_assert( std::is_standard_layout<BlitImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );
  18445. template <>
  18446. struct CppType<StructureType, StructureType::eBlitImageInfo2KHR>
  18447. {
  18448. using Type = BlitImageInfo2KHR;
  18449. };
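// Illustrative sketch (not part of the generated header): the enhanced-mode constructor above
// takes the regions as an ArrayProxyNoTemporaries and fills regionCount/pRegions. Assuming
// VK_KHR_copy_commands2 is enabled, the resulting struct is consumed by the blitImage2KHR
// command wrapper defined elsewhere in this header. Names are hypothetical.
//
//   std::array<VULKAN_HPP_NAMESPACE::ImageBlit2KHR, 1> regions = { region };
//   VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR blitInfo( srcImage,
//                                                     VULKAN_HPP_NAMESPACE::ImageLayout::eTransferSrcOptimal,
//                                                     dstImage,
//                                                     VULKAN_HPP_NAMESPACE::ImageLayout::eTransferDstOptimal,
//                                                     regions,
//                                                     VULKAN_HPP_NAMESPACE::Filter::eLinear );
//   // commandBuffer.blitImage2KHR( blitInfo );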
  18450. struct BufferCopy
  18451. {
  18452. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18453. VULKAN_HPP_CONSTEXPR BufferCopy(VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
  18454. : srcOffset( srcOffset_ ), dstOffset( dstOffset_ ), size( size_ )
  18455. {}
  18456. VULKAN_HPP_CONSTEXPR BufferCopy( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18457. BufferCopy( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
  18458. : BufferCopy( *reinterpret_cast<BufferCopy const *>( &rhs ) )
  18459. {}
  18460. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18461. VULKAN_HPP_CONSTEXPR_14 BufferCopy & operator=( BufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18462. BufferCopy & operator=( VkBufferCopy const & rhs ) VULKAN_HPP_NOEXCEPT
  18463. {
  18464. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy const *>( &rhs );
  18465. return *this;
  18466. }
  18467. BufferCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
  18468. {
  18469. srcOffset = srcOffset_;
  18470. return *this;
  18471. }
  18472. BufferCopy & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
  18473. {
  18474. dstOffset = dstOffset_;
  18475. return *this;
  18476. }
  18477. BufferCopy & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
  18478. {
  18479. size = size_;
  18480. return *this;
  18481. }
  18482. operator VkBufferCopy const&() const VULKAN_HPP_NOEXCEPT
  18483. {
  18484. return *reinterpret_cast<const VkBufferCopy*>( this );
  18485. }
  18486. operator VkBufferCopy &() VULKAN_HPP_NOEXCEPT
  18487. {
  18488. return *reinterpret_cast<VkBufferCopy*>( this );
  18489. }
  18490. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  18491. auto operator<=>( BufferCopy const& ) const = default;
  18492. #else
  18493. bool operator==( BufferCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
  18494. {
  18495. return ( srcOffset == rhs.srcOffset )
  18496. && ( dstOffset == rhs.dstOffset )
  18497. && ( size == rhs.size );
  18498. }
  18499. bool operator!=( BufferCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
  18500. {
  18501. return !operator==( rhs );
  18502. }
  18503. #endif
  18504. public:
  18505. VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
  18506. VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
  18507. VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  18508. };
  18509. static_assert( sizeof( BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
  18510. static_assert( std::is_standard_layout<BufferCopy>::value, "struct wrapper is not a standard layout!" );
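// Illustrative sketch (not part of the generated header): a plain buffer-to-buffer copy region
// built with the constructor above. Names are hypothetical; the region would be consumed by the
// copyBuffer command wrapper defined elsewhere in this header.
//
//   VULKAN_HPP_NAMESPACE::BufferCopy copyRegion( /*srcOffset*/ 0, /*dstOffset*/ 0, /*size*/ 65536 );
//   // commandBuffer.copyBuffer( srcBuffer, dstBuffer, copyRegion );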
  18511. struct BufferCopy2KHR
  18512. {
  18513. static const bool allowDuplicate = false;
  18514. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCopy2KHR;
  18515. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18516. VULKAN_HPP_CONSTEXPR BufferCopy2KHR(VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
  18517. : srcOffset( srcOffset_ ), dstOffset( dstOffset_ ), size( size_ )
  18518. {}
  18519. VULKAN_HPP_CONSTEXPR BufferCopy2KHR( BufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18520. BufferCopy2KHR( VkBufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  18521. : BufferCopy2KHR( *reinterpret_cast<BufferCopy2KHR const *>( &rhs ) )
  18522. {}
  18523. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18524. VULKAN_HPP_CONSTEXPR_14 BufferCopy2KHR & operator=( BufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18525. BufferCopy2KHR & operator=( VkBufferCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  18526. {
  18527. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCopy2KHR const *>( &rhs );
  18528. return *this;
  18529. }
  18530. BufferCopy2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  18531. {
  18532. pNext = pNext_;
  18533. return *this;
  18534. }
  18535. BufferCopy2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::DeviceSize srcOffset_ ) VULKAN_HPP_NOEXCEPT
  18536. {
  18537. srcOffset = srcOffset_;
  18538. return *this;
  18539. }
  18540. BufferCopy2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::DeviceSize dstOffset_ ) VULKAN_HPP_NOEXCEPT
  18541. {
  18542. dstOffset = dstOffset_;
  18543. return *this;
  18544. }
  18545. BufferCopy2KHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
  18546. {
  18547. size = size_;
  18548. return *this;
  18549. }
  18550. operator VkBufferCopy2KHR const&() const VULKAN_HPP_NOEXCEPT
  18551. {
  18552. return *reinterpret_cast<const VkBufferCopy2KHR*>( this );
  18553. }
  18554. operator VkBufferCopy2KHR &() VULKAN_HPP_NOEXCEPT
  18555. {
  18556. return *reinterpret_cast<VkBufferCopy2KHR*>( this );
  18557. }
  18558. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  18559. auto operator<=>( BufferCopy2KHR const& ) const = default;
  18560. #else
  18561. bool operator==( BufferCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  18562. {
  18563. return ( sType == rhs.sType )
  18564. && ( pNext == rhs.pNext )
  18565. && ( srcOffset == rhs.srcOffset )
  18566. && ( dstOffset == rhs.dstOffset )
  18567. && ( size == rhs.size );
  18568. }
  18569. bool operator!=( BufferCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  18570. {
  18571. return !operator==( rhs );
  18572. }
  18573. #endif
  18574. public:
  18575. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCopy2KHR;
  18576. const void* pNext = {};
  18577. VULKAN_HPP_NAMESPACE::DeviceSize srcOffset = {};
  18578. VULKAN_HPP_NAMESPACE::DeviceSize dstOffset = {};
  18579. VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  18580. };
  18581. static_assert( sizeof( BufferCopy2KHR ) == sizeof( VkBufferCopy2KHR ), "struct and wrapper have different size!" );
  18582. static_assert( std::is_standard_layout<BufferCopy2KHR>::value, "struct wrapper is not a standard layout!" );
  18583. template <>
  18584. struct CppType<StructureType, StructureType::eBufferCopy2KHR>
  18585. {
  18586. using Type = BufferCopy2KHR;
  18587. };
  18588. struct BufferCreateInfo
  18589. {
  18590. static const bool allowDuplicate = false;
  18591. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferCreateInfo;
  18592. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18593. VULKAN_HPP_CONSTEXPR BufferCreateInfo(VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t* pQueueFamilyIndices_ = {}) VULKAN_HPP_NOEXCEPT
  18594. : flags( flags_ ), size( size_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ )
  18595. {}
  18596. VULKAN_HPP_CONSTEXPR BufferCreateInfo( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18597. BufferCreateInfo( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  18598. : BufferCreateInfo( *reinterpret_cast<BufferCreateInfo const *>( &rhs ) )
  18599. {}
  18600. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  18601. BufferCreateInfo( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_, VULKAN_HPP_NAMESPACE::DeviceSize size_, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ )
  18602. : flags( flags_ ), size( size_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() )
  18603. {}
  18604. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  18605. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18606. VULKAN_HPP_CONSTEXPR_14 BufferCreateInfo & operator=( BufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18607. BufferCreateInfo & operator=( VkBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  18608. {
  18609. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferCreateInfo const *>( &rhs );
  18610. return *this;
  18611. }
  18612. BufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  18613. {
  18614. pNext = pNext_;
  18615. return *this;
  18616. }
  18617. BufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  18618. {
  18619. flags = flags_;
  18620. return *this;
  18621. }
  18622. BufferCreateInfo & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
  18623. {
  18624. size = size_;
  18625. return *this;
  18626. }
  18627. BufferCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
  18628. {
  18629. usage = usage_;
  18630. return *this;
  18631. }
  18632. BufferCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
  18633. {
  18634. sharingMode = sharingMode_;
  18635. return *this;
  18636. }
  18637. BufferCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
  18638. {
  18639. queueFamilyIndexCount = queueFamilyIndexCount_;
  18640. return *this;
  18641. }
  18642. BufferCreateInfo & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
  18643. {
  18644. pQueueFamilyIndices = pQueueFamilyIndices_;
  18645. return *this;
  18646. }
  18647. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  18648. BufferCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
  18649. {
  18650. queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
  18651. pQueueFamilyIndices = queueFamilyIndices_.data();
  18652. return *this;
  18653. }
  18654. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  18655. operator VkBufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  18656. {
  18657. return *reinterpret_cast<const VkBufferCreateInfo*>( this );
  18658. }
  18659. operator VkBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
  18660. {
  18661. return *reinterpret_cast<VkBufferCreateInfo*>( this );
  18662. }
  18663. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  18664. auto operator<=>( BufferCreateInfo const& ) const = default;
  18665. #else
  18666. bool operator==( BufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  18667. {
  18668. return ( sType == rhs.sType )
  18669. && ( pNext == rhs.pNext )
  18670. && ( flags == rhs.flags )
  18671. && ( size == rhs.size )
  18672. && ( usage == rhs.usage )
  18673. && ( sharingMode == rhs.sharingMode )
  18674. && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
  18675. && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
  18676. }
  18677. bool operator!=( BufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  18678. {
  18679. return !operator==( rhs );
  18680. }
  18681. #endif
  18682. public:
  18683. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferCreateInfo;
  18684. const void* pNext = {};
  18685. VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
  18686. VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  18687. VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
  18688. VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
  18689. uint32_t queueFamilyIndexCount = {};
  18690. const uint32_t* pQueueFamilyIndices = {};
  18691. };
  18692. static_assert( sizeof( BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" );
  18693. static_assert( std::is_standard_layout<BufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
  18694. template <>
  18695. struct CppType<StructureType, StructureType::eBufferCreateInfo>
  18696. {
  18697. using Type = BufferCreateInfo;
  18698. };
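// Illustrative sketch (not part of the generated header): a BufferCreateInfo can be built with
// the fluent setters and then passed either to the C++ wrapper (Device::createBuffer, defined
// elsewhere in this header) or, via the conversion operators above, to the C entry point.
// Names such as "device" are hypothetical.
//
//   VULKAN_HPP_NAMESPACE::BufferCreateInfo bufferCreateInfo;
//   bufferCreateInfo.setSize( 65536 )
//                   .setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlagBits::eVertexBuffer
//                              | VULKAN_HPP_NAMESPACE::BufferUsageFlagBits::eTransferDst )
//                   .setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode::eExclusive );
//   VULKAN_HPP_NAMESPACE::Buffer buffer = device.createBuffer( bufferCreateInfo );
//   // Or with the C API:
//   // VkBuffer cBuffer;
//   // vkCreateBuffer( device, reinterpret_cast<const VkBufferCreateInfo *>( &bufferCreateInfo ), nullptr, &cBuffer );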
  18699. struct BufferDeviceAddressCreateInfoEXT
  18700. {
  18701. static const bool allowDuplicate = false;
  18702. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressCreateInfoEXT;
  18703. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18704. VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}) VULKAN_HPP_NOEXCEPT
  18705. : deviceAddress( deviceAddress_ )
  18706. {}
  18707. VULKAN_HPP_CONSTEXPR BufferDeviceAddressCreateInfoEXT( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18708. BufferDeviceAddressCreateInfoEXT( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  18709. : BufferDeviceAddressCreateInfoEXT( *reinterpret_cast<BufferDeviceAddressCreateInfoEXT const *>( &rhs ) )
  18710. {}
  18711. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18712. VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressCreateInfoEXT & operator=( BufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18713. BufferDeviceAddressCreateInfoEXT & operator=( VkBufferDeviceAddressCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  18714. {
  18715. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT const *>( &rhs );
  18716. return *this;
  18717. }
  18718. BufferDeviceAddressCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  18719. {
  18720. pNext = pNext_;
  18721. return *this;
  18722. }
  18723. BufferDeviceAddressCreateInfoEXT & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
  18724. {
  18725. deviceAddress = deviceAddress_;
  18726. return *this;
  18727. }
  18728. operator VkBufferDeviceAddressCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  18729. {
  18730. return *reinterpret_cast<const VkBufferDeviceAddressCreateInfoEXT*>( this );
  18731. }
  18732. operator VkBufferDeviceAddressCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  18733. {
  18734. return *reinterpret_cast<VkBufferDeviceAddressCreateInfoEXT*>( this );
  18735. }
  18736. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  18737. auto operator<=>( BufferDeviceAddressCreateInfoEXT const& ) const = default;
  18738. #else
  18739. bool operator==( BufferDeviceAddressCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  18740. {
  18741. return ( sType == rhs.sType )
  18742. && ( pNext == rhs.pNext )
  18743. && ( deviceAddress == rhs.deviceAddress );
  18744. }
  18745. bool operator!=( BufferDeviceAddressCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  18746. {
  18747. return !operator==( rhs );
  18748. }
  18749. #endif
  18750. public:
  18751. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressCreateInfoEXT;
  18752. const void* pNext = {};
  18753. VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
  18754. };
  18755. static_assert( sizeof( BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ), "struct and wrapper have different size!" );
  18756. static_assert( std::is_standard_layout<BufferDeviceAddressCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  18757. template <>
  18758. struct CppType<StructureType, StructureType::eBufferDeviceAddressCreateInfoEXT>
  18759. {
  18760. using Type = BufferDeviceAddressCreateInfoEXT;
  18761. };
  18762. struct BufferDeviceAddressInfo
  18763. {
  18764. static const bool allowDuplicate = false;
  18765. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferDeviceAddressInfo;
  18766. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18767. VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT
  18768. : buffer( buffer_ )
  18769. {}
  18770. VULKAN_HPP_CONSTEXPR BufferDeviceAddressInfo( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18771. BufferDeviceAddressInfo( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  18772. : BufferDeviceAddressInfo( *reinterpret_cast<BufferDeviceAddressInfo const *>( &rhs ) )
  18773. {}
  18774. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18775. VULKAN_HPP_CONSTEXPR_14 BufferDeviceAddressInfo & operator=( BufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18776. BufferDeviceAddressInfo & operator=( VkBufferDeviceAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  18777. {
  18778. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo const *>( &rhs );
  18779. return *this;
  18780. }
  18781. BufferDeviceAddressInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  18782. {
  18783. pNext = pNext_;
  18784. return *this;
  18785. }
  18786. BufferDeviceAddressInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
  18787. {
  18788. buffer = buffer_;
  18789. return *this;
  18790. }
  18791. operator VkBufferDeviceAddressInfo const&() const VULKAN_HPP_NOEXCEPT
  18792. {
  18793. return *reinterpret_cast<const VkBufferDeviceAddressInfo*>( this );
  18794. }
  18795. operator VkBufferDeviceAddressInfo &() VULKAN_HPP_NOEXCEPT
  18796. {
  18797. return *reinterpret_cast<VkBufferDeviceAddressInfo*>( this );
  18798. }
  18799. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  18800. auto operator<=>( BufferDeviceAddressInfo const& ) const = default;
  18801. #else
  18802. bool operator==( BufferDeviceAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  18803. {
  18804. return ( sType == rhs.sType )
  18805. && ( pNext == rhs.pNext )
  18806. && ( buffer == rhs.buffer );
  18807. }
  18808. bool operator!=( BufferDeviceAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  18809. {
  18810. return !operator==( rhs );
  18811. }
  18812. #endif
  18813. public:
  18814. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferDeviceAddressInfo;
  18815. const void* pNext = {};
  18816. VULKAN_HPP_NAMESPACE::Buffer buffer = {};
  18817. };
  18818. static_assert( sizeof( BufferDeviceAddressInfo ) == sizeof( VkBufferDeviceAddressInfo ), "struct and wrapper have different size!" );
  18819. static_assert( std::is_standard_layout<BufferDeviceAddressInfo>::value, "struct wrapper is not a standard layout!" );
  18820. template <>
  18821. struct CppType<StructureType, StructureType::eBufferDeviceAddressInfo>
  18822. {
  18823. using Type = BufferDeviceAddressInfo;
  18824. };
  18825. using BufferDeviceAddressInfoEXT = BufferDeviceAddressInfo;
  18826. using BufferDeviceAddressInfoKHR = BufferDeviceAddressInfo;
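// Illustrative sketch (not part of the generated header): BufferDeviceAddressInfo is the input
// to the buffer-device-address queries. Assuming Vulkan 1.2 (or VK_KHR_buffer_device_address)
// and the Device::getBufferAddress wrapper defined elsewhere in this header; names are
// hypothetical.
//
//   VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo addressInfo;
//   addressInfo.setBuffer( buffer );
//   VULKAN_HPP_NAMESPACE::DeviceAddress address = device.getBufferAddress( addressInfo );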
  18827. struct BufferImageCopy
  18828. {
  18829. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18830. VULKAN_HPP_CONSTEXPR BufferImageCopy(VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT
  18831. : bufferOffset( bufferOffset_ ), bufferRowLength( bufferRowLength_ ), bufferImageHeight( bufferImageHeight_ ), imageSubresource( imageSubresource_ ), imageOffset( imageOffset_ ), imageExtent( imageExtent_ )
  18832. {}
  18833. VULKAN_HPP_CONSTEXPR BufferImageCopy( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18834. BufferImageCopy( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
  18835. : BufferImageCopy( *reinterpret_cast<BufferImageCopy const *>( &rhs ) )
  18836. {}
  18837. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18838. VULKAN_HPP_CONSTEXPR_14 BufferImageCopy & operator=( BufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18839. BufferImageCopy & operator=( VkBufferImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
  18840. {
  18841. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy const *>( &rhs );
  18842. return *this;
  18843. }
  18844. BufferImageCopy & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
  18845. {
  18846. bufferOffset = bufferOffset_;
  18847. return *this;
  18848. }
  18849. BufferImageCopy & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
  18850. {
  18851. bufferRowLength = bufferRowLength_;
  18852. return *this;
  18853. }
  18854. BufferImageCopy & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
  18855. {
  18856. bufferImageHeight = bufferImageHeight_;
  18857. return *this;
  18858. }
  18859. BufferImageCopy & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
  18860. {
  18861. imageSubresource = imageSubresource_;
  18862. return *this;
  18863. }
  18864. BufferImageCopy & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
  18865. {
  18866. imageOffset = imageOffset_;
  18867. return *this;
  18868. }
  18869. BufferImageCopy & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
  18870. {
  18871. imageExtent = imageExtent_;
  18872. return *this;
  18873. }
  18874. operator VkBufferImageCopy const&() const VULKAN_HPP_NOEXCEPT
  18875. {
  18876. return *reinterpret_cast<const VkBufferImageCopy*>( this );
  18877. }
  18878. operator VkBufferImageCopy &() VULKAN_HPP_NOEXCEPT
  18879. {
  18880. return *reinterpret_cast<VkBufferImageCopy*>( this );
  18881. }
  18882. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  18883. auto operator<=>( BufferImageCopy const& ) const = default;
  18884. #else
  18885. bool operator==( BufferImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
  18886. {
  18887. return ( bufferOffset == rhs.bufferOffset )
  18888. && ( bufferRowLength == rhs.bufferRowLength )
  18889. && ( bufferImageHeight == rhs.bufferImageHeight )
  18890. && ( imageSubresource == rhs.imageSubresource )
  18891. && ( imageOffset == rhs.imageOffset )
  18892. && ( imageExtent == rhs.imageExtent );
  18893. }
  18894. bool operator!=( BufferImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
  18895. {
  18896. return !operator==( rhs );
  18897. }
  18898. #endif
  18899. public:
  18900. VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {};
  18901. uint32_t bufferRowLength = {};
  18902. uint32_t bufferImageHeight = {};
  18903. VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
  18904. VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
  18905. VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
  18906. };
  18907. static_assert( sizeof( BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
  18908. static_assert( std::is_standard_layout<BufferImageCopy>::value, "struct wrapper is not a standard layout!" );
  18909. struct BufferImageCopy2KHR
  18910. {
  18911. static const bool allowDuplicate = false;
  18912. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferImageCopy2KHR;
  18913. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18914. VULKAN_HPP_CONSTEXPR BufferImageCopy2KHR(VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ = {}, uint32_t bufferRowLength_ = {}, uint32_t bufferImageHeight_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D imageOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT
  18915. : bufferOffset( bufferOffset_ ), bufferRowLength( bufferRowLength_ ), bufferImageHeight( bufferImageHeight_ ), imageSubresource( imageSubresource_ ), imageOffset( imageOffset_ ), imageExtent( imageExtent_ )
  18916. {}
  18917. VULKAN_HPP_CONSTEXPR BufferImageCopy2KHR( BufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18918. BufferImageCopy2KHR( VkBufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  18919. : BufferImageCopy2KHR( *reinterpret_cast<BufferImageCopy2KHR const *>( &rhs ) )
  18920. {}
  18921. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  18922. VULKAN_HPP_CONSTEXPR_14 BufferImageCopy2KHR & operator=( BufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  18923. BufferImageCopy2KHR & operator=( VkBufferImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  18924. {
  18925. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR const *>( &rhs );
  18926. return *this;
  18927. }
  18928. BufferImageCopy2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  18929. {
  18930. pNext = pNext_;
  18931. return *this;
  18932. }
  18933. BufferImageCopy2KHR & setBufferOffset( VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset_ ) VULKAN_HPP_NOEXCEPT
  18934. {
  18935. bufferOffset = bufferOffset_;
  18936. return *this;
  18937. }
  18938. BufferImageCopy2KHR & setBufferRowLength( uint32_t bufferRowLength_ ) VULKAN_HPP_NOEXCEPT
  18939. {
  18940. bufferRowLength = bufferRowLength_;
  18941. return *this;
  18942. }
  18943. BufferImageCopy2KHR & setBufferImageHeight( uint32_t bufferImageHeight_ ) VULKAN_HPP_NOEXCEPT
  18944. {
  18945. bufferImageHeight = bufferImageHeight_;
  18946. return *this;
  18947. }
  18948. BufferImageCopy2KHR & setImageSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & imageSubresource_ ) VULKAN_HPP_NOEXCEPT
  18949. {
  18950. imageSubresource = imageSubresource_;
  18951. return *this;
  18952. }
  18953. BufferImageCopy2KHR & setImageOffset( VULKAN_HPP_NAMESPACE::Offset3D const & imageOffset_ ) VULKAN_HPP_NOEXCEPT
  18954. {
  18955. imageOffset = imageOffset_;
  18956. return *this;
  18957. }
  18958. BufferImageCopy2KHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent3D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
  18959. {
  18960. imageExtent = imageExtent_;
  18961. return *this;
  18962. }
  18963. operator VkBufferImageCopy2KHR const&() const VULKAN_HPP_NOEXCEPT
  18964. {
  18965. return *reinterpret_cast<const VkBufferImageCopy2KHR*>( this );
  18966. }
  18967. operator VkBufferImageCopy2KHR &() VULKAN_HPP_NOEXCEPT
  18968. {
  18969. return *reinterpret_cast<VkBufferImageCopy2KHR*>( this );
  18970. }
  18971. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  18972. auto operator<=>( BufferImageCopy2KHR const& ) const = default;
  18973. #else
  18974. bool operator==( BufferImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  18975. {
  18976. return ( sType == rhs.sType )
  18977. && ( pNext == rhs.pNext )
  18978. && ( bufferOffset == rhs.bufferOffset )
  18979. && ( bufferRowLength == rhs.bufferRowLength )
  18980. && ( bufferImageHeight == rhs.bufferImageHeight )
  18981. && ( imageSubresource == rhs.imageSubresource )
  18982. && ( imageOffset == rhs.imageOffset )
  18983. && ( imageExtent == rhs.imageExtent );
  18984. }
  18985. bool operator!=( BufferImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  18986. {
  18987. return !operator==( rhs );
  18988. }
  18989. #endif
  18990. public:
  18991. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferImageCopy2KHR;
  18992. const void* pNext = {};
  18993. VULKAN_HPP_NAMESPACE::DeviceSize bufferOffset = {};
  18994. uint32_t bufferRowLength = {};
  18995. uint32_t bufferImageHeight = {};
  18996. VULKAN_HPP_NAMESPACE::ImageSubresourceLayers imageSubresource = {};
  18997. VULKAN_HPP_NAMESPACE::Offset3D imageOffset = {};
  18998. VULKAN_HPP_NAMESPACE::Extent3D imageExtent = {};
  18999. };
  19000. static_assert( sizeof( BufferImageCopy2KHR ) == sizeof( VkBufferImageCopy2KHR ), "struct and wrapper have different size!" );
  19001. static_assert( std::is_standard_layout<BufferImageCopy2KHR>::value, "struct wrapper is not a standard layout!" );
  19002. template <>
  19003. struct CppType<StructureType, StructureType::eBufferImageCopy2KHR>
  19004. {
  19005. using Type = BufferImageCopy2KHR;
  19006. };
  19007. struct BufferMemoryBarrier
  19008. {
  19009. static const bool allowDuplicate = false;
  19010. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryBarrier;
  19011. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19012. VULKAN_HPP_CONSTEXPR BufferMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
  19013. : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), buffer( buffer_ ), offset( offset_ ), size( size_ )
  19014. {}
  19015. VULKAN_HPP_CONSTEXPR BufferMemoryBarrier( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19016. BufferMemoryBarrier( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
  19017. : BufferMemoryBarrier( *reinterpret_cast<BufferMemoryBarrier const *>( &rhs ) )
  19018. {}
  19019. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19020. VULKAN_HPP_CONSTEXPR_14 BufferMemoryBarrier & operator=( BufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19021. BufferMemoryBarrier & operator=( VkBufferMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
  19022. {
  19023. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier const *>( &rhs );
  19024. return *this;
  19025. }
  19026. BufferMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  19027. {
  19028. pNext = pNext_;
  19029. return *this;
  19030. }
  19031. BufferMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
  19032. {
  19033. srcAccessMask = srcAccessMask_;
  19034. return *this;
  19035. }
  19036. BufferMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
  19037. {
  19038. dstAccessMask = dstAccessMask_;
  19039. return *this;
  19040. }
  19041. BufferMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
  19042. {
  19043. srcQueueFamilyIndex = srcQueueFamilyIndex_;
  19044. return *this;
  19045. }
  19046. BufferMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
  19047. {
  19048. dstQueueFamilyIndex = dstQueueFamilyIndex_;
  19049. return *this;
  19050. }
  19051. BufferMemoryBarrier & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
  19052. {
  19053. buffer = buffer_;
  19054. return *this;
  19055. }
  19056. BufferMemoryBarrier & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
  19057. {
  19058. offset = offset_;
  19059. return *this;
  19060. }
  19061. BufferMemoryBarrier & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
  19062. {
  19063. size = size_;
  19064. return *this;
  19065. }
  19066. operator VkBufferMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
  19067. {
  19068. return *reinterpret_cast<const VkBufferMemoryBarrier*>( this );
  19069. }
  19070. operator VkBufferMemoryBarrier &() VULKAN_HPP_NOEXCEPT
  19071. {
  19072. return *reinterpret_cast<VkBufferMemoryBarrier*>( this );
  19073. }
  19074. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  19075. auto operator<=>( BufferMemoryBarrier const& ) const = default;
  19076. #else
  19077. bool operator==( BufferMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
  19078. {
  19079. return ( sType == rhs.sType )
  19080. && ( pNext == rhs.pNext )
  19081. && ( srcAccessMask == rhs.srcAccessMask )
  19082. && ( dstAccessMask == rhs.dstAccessMask )
  19083. && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
  19084. && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
  19085. && ( buffer == rhs.buffer )
  19086. && ( offset == rhs.offset )
  19087. && ( size == rhs.size );
  19088. }
  19089. bool operator!=( BufferMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
  19090. {
  19091. return !operator==( rhs );
  19092. }
  19093. #endif
  19094. public:
  19095. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryBarrier;
  19096. const void* pNext = {};
  19097. VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
  19098. VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
  19099. uint32_t srcQueueFamilyIndex = {};
  19100. uint32_t dstQueueFamilyIndex = {};
  19101. VULKAN_HPP_NAMESPACE::Buffer buffer = {};
  19102. VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
  19103. VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  19104. };
  19105. static_assert( sizeof( BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" );
  19106. static_assert( std::is_standard_layout<BufferMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
  19107. template <>
  19108. struct CppType<StructureType, StructureType::eBufferMemoryBarrier>
  19109. {
  19110. using Type = BufferMemoryBarrier;
  19111. };
  19112. struct BufferMemoryRequirementsInfo2
  19113. {
  19114. static const bool allowDuplicate = false;
  19115. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferMemoryRequirementsInfo2;
  19116. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19117. VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT
  19118. : buffer( buffer_ )
  19119. {}
  19120. VULKAN_HPP_CONSTEXPR BufferMemoryRequirementsInfo2( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19121. BufferMemoryRequirementsInfo2( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
  19122. : BufferMemoryRequirementsInfo2( *reinterpret_cast<BufferMemoryRequirementsInfo2 const *>( &rhs ) )
  19123. {}
  19124. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19125. VULKAN_HPP_CONSTEXPR_14 BufferMemoryRequirementsInfo2 & operator=( BufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19126. BufferMemoryRequirementsInfo2 & operator=( VkBufferMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
  19127. {
  19128. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 const *>( &rhs );
  19129. return *this;
  19130. }
  19131. BufferMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  19132. {
  19133. pNext = pNext_;
  19134. return *this;
  19135. }
  19136. BufferMemoryRequirementsInfo2 & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
  19137. {
  19138. buffer = buffer_;
  19139. return *this;
  19140. }
  19141. operator VkBufferMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT
  19142. {
  19143. return *reinterpret_cast<const VkBufferMemoryRequirementsInfo2*>( this );
  19144. }
  19145. operator VkBufferMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
  19146. {
  19147. return *reinterpret_cast<VkBufferMemoryRequirementsInfo2*>( this );
  19148. }
  19149. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  19150. auto operator<=>( BufferMemoryRequirementsInfo2 const& ) const = default;
  19151. #else
  19152. bool operator==( BufferMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  19153. {
  19154. return ( sType == rhs.sType )
  19155. && ( pNext == rhs.pNext )
  19156. && ( buffer == rhs.buffer );
  19157. }
  19158. bool operator!=( BufferMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  19159. {
  19160. return !operator==( rhs );
  19161. }
  19162. #endif
  19163. public:
  19164. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferMemoryRequirementsInfo2;
  19165. const void* pNext = {};
  19166. VULKAN_HPP_NAMESPACE::Buffer buffer = {};
  19167. };
  19168. static_assert( sizeof( BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
  19169. static_assert( std::is_standard_layout<BufferMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
  19170. template <>
  19171. struct CppType<StructureType, StructureType::eBufferMemoryRequirementsInfo2>
  19172. {
  19173. using Type = BufferMemoryRequirementsInfo2;
  19174. };
  19175. using BufferMemoryRequirementsInfo2KHR = BufferMemoryRequirementsInfo2;
  19176. struct BufferOpaqueCaptureAddressCreateInfo
  19177. {
  19178. static const bool allowDuplicate = false;
  19179. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
  19180. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19181. VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo(uint64_t opaqueCaptureAddress_ = {}) VULKAN_HPP_NOEXCEPT
  19182. : opaqueCaptureAddress( opaqueCaptureAddress_ )
  19183. {}
  19184. VULKAN_HPP_CONSTEXPR BufferOpaqueCaptureAddressCreateInfo( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19185. BufferOpaqueCaptureAddressCreateInfo( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  19186. : BufferOpaqueCaptureAddressCreateInfo( *reinterpret_cast<BufferOpaqueCaptureAddressCreateInfo const *>( &rhs ) )
  19187. {}
  19188. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19189. VULKAN_HPP_CONSTEXPR_14 BufferOpaqueCaptureAddressCreateInfo & operator=( BufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19190. BufferOpaqueCaptureAddressCreateInfo & operator=( VkBufferOpaqueCaptureAddressCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  19191. {
  19192. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo const *>( &rhs );
  19193. return *this;
  19194. }
  19195. BufferOpaqueCaptureAddressCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  19196. {
  19197. pNext = pNext_;
  19198. return *this;
  19199. }
  19200. BufferOpaqueCaptureAddressCreateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
  19201. {
  19202. opaqueCaptureAddress = opaqueCaptureAddress_;
  19203. return *this;
  19204. }
  19205. operator VkBufferOpaqueCaptureAddressCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  19206. {
  19207. return *reinterpret_cast<const VkBufferOpaqueCaptureAddressCreateInfo*>( this );
  19208. }
  19209. operator VkBufferOpaqueCaptureAddressCreateInfo &() VULKAN_HPP_NOEXCEPT
  19210. {
  19211. return *reinterpret_cast<VkBufferOpaqueCaptureAddressCreateInfo*>( this );
  19212. }
  19213. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  19214. auto operator<=>( BufferOpaqueCaptureAddressCreateInfo const& ) const = default;
  19215. #else
  19216. bool operator==( BufferOpaqueCaptureAddressCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  19217. {
  19218. return ( sType == rhs.sType )
  19219. && ( pNext == rhs.pNext )
  19220. && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
  19221. }
  19222. bool operator!=( BufferOpaqueCaptureAddressCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  19223. {
  19224. return !operator==( rhs );
  19225. }
  19226. #endif
  19227. public:
  19228. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferOpaqueCaptureAddressCreateInfo;
  19229. const void* pNext = {};
  19230. uint64_t opaqueCaptureAddress = {};
  19231. };
  19232. static_assert( sizeof( BufferOpaqueCaptureAddressCreateInfo ) == sizeof( VkBufferOpaqueCaptureAddressCreateInfo ), "struct and wrapper have different size!" );
  19233. static_assert( std::is_standard_layout<BufferOpaqueCaptureAddressCreateInfo>::value, "struct wrapper is not a standard layout!" );
  19234. template <>
  19235. struct CppType<StructureType, StructureType::eBufferOpaqueCaptureAddressCreateInfo>
  19236. {
  19237. using Type = BufferOpaqueCaptureAddressCreateInfo;
  19238. };
  19239. using BufferOpaqueCaptureAddressCreateInfoKHR = BufferOpaqueCaptureAddressCreateInfo;
  19240. struct BufferViewCreateInfo
  19241. {
  19242. static const bool allowDuplicate = false;
  19243. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eBufferViewCreateInfo;
  19244. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19245. VULKAN_HPP_CONSTEXPR BufferViewCreateInfo(VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}) VULKAN_HPP_NOEXCEPT
  19246. : flags( flags_ ), buffer( buffer_ ), format( format_ ), offset( offset_ ), range( range_ )
  19247. {}
  19248. VULKAN_HPP_CONSTEXPR BufferViewCreateInfo( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19249. BufferViewCreateInfo( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  19250. : BufferViewCreateInfo( *reinterpret_cast<BufferViewCreateInfo const *>( &rhs ) )
  19251. {}
  19252. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19253. VULKAN_HPP_CONSTEXPR_14 BufferViewCreateInfo & operator=( BufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19254. BufferViewCreateInfo & operator=( VkBufferViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  19255. {
  19256. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo const *>( &rhs );
  19257. return *this;
  19258. }
  19259. BufferViewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  19260. {
  19261. pNext = pNext_;
  19262. return *this;
  19263. }
  19264. BufferViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  19265. {
  19266. flags = flags_;
  19267. return *this;
  19268. }
  19269. BufferViewCreateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
  19270. {
  19271. buffer = buffer_;
  19272. return *this;
  19273. }
  19274. BufferViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
  19275. {
  19276. format = format_;
  19277. return *this;
  19278. }
  19279. BufferViewCreateInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
  19280. {
  19281. offset = offset_;
  19282. return *this;
  19283. }
  19284. BufferViewCreateInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
  19285. {
  19286. range = range_;
  19287. return *this;
  19288. }
  19289. operator VkBufferViewCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  19290. {
  19291. return *reinterpret_cast<const VkBufferViewCreateInfo*>( this );
  19292. }
  19293. operator VkBufferViewCreateInfo &() VULKAN_HPP_NOEXCEPT
  19294. {
  19295. return *reinterpret_cast<VkBufferViewCreateInfo*>( this );
  19296. }
  19297. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  19298. auto operator<=>( BufferViewCreateInfo const& ) const = default;
  19299. #else
  19300. bool operator==( BufferViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  19301. {
  19302. return ( sType == rhs.sType )
  19303. && ( pNext == rhs.pNext )
  19304. && ( flags == rhs.flags )
  19305. && ( buffer == rhs.buffer )
  19306. && ( format == rhs.format )
  19307. && ( offset == rhs.offset )
  19308. && ( range == rhs.range );
  19309. }
  19310. bool operator!=( BufferViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  19311. {
  19312. return !operator==( rhs );
  19313. }
  19314. #endif
  19315. public:
  19316. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eBufferViewCreateInfo;
  19317. const void* pNext = {};
  19318. VULKAN_HPP_NAMESPACE::BufferViewCreateFlags flags = {};
  19319. VULKAN_HPP_NAMESPACE::Buffer buffer = {};
  19320. VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  19321. VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
  19322. VULKAN_HPP_NAMESPACE::DeviceSize range = {};
  19323. };
  19324. static_assert( sizeof( BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" );
  19325. static_assert( std::is_standard_layout<BufferViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
  19326. template <>
  19327. struct CppType<StructureType, StructureType::eBufferViewCreateInfo>
  19328. {
  19329. using Type = BufferViewCreateInfo;
  19330. };
  19331. struct CalibratedTimestampInfoEXT
  19332. {
  19333. static const bool allowDuplicate = false;
  19334. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCalibratedTimestampInfoEXT;
  19335. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19336. VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT(VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice) VULKAN_HPP_NOEXCEPT
  19337. : timeDomain( timeDomain_ )
  19338. {}
  19339. VULKAN_HPP_CONSTEXPR CalibratedTimestampInfoEXT( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19340. CalibratedTimestampInfoEXT( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  19341. : CalibratedTimestampInfoEXT( *reinterpret_cast<CalibratedTimestampInfoEXT const *>( &rhs ) )
  19342. {}
  19343. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19344. VULKAN_HPP_CONSTEXPR_14 CalibratedTimestampInfoEXT & operator=( CalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19345. CalibratedTimestampInfoEXT & operator=( VkCalibratedTimestampInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  19346. {
  19347. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT const *>( &rhs );
  19348. return *this;
  19349. }
  19350. CalibratedTimestampInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  19351. {
  19352. pNext = pNext_;
  19353. return *this;
  19354. }
  19355. CalibratedTimestampInfoEXT & setTimeDomain( VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain_ ) VULKAN_HPP_NOEXCEPT
  19356. {
  19357. timeDomain = timeDomain_;
  19358. return *this;
  19359. }
  19360. operator VkCalibratedTimestampInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  19361. {
  19362. return *reinterpret_cast<const VkCalibratedTimestampInfoEXT*>( this );
  19363. }
  19364. operator VkCalibratedTimestampInfoEXT &() VULKAN_HPP_NOEXCEPT
  19365. {
  19366. return *reinterpret_cast<VkCalibratedTimestampInfoEXT*>( this );
  19367. }
  19368. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  19369. auto operator<=>( CalibratedTimestampInfoEXT const& ) const = default;
  19370. #else
  19371. bool operator==( CalibratedTimestampInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  19372. {
  19373. return ( sType == rhs.sType )
  19374. && ( pNext == rhs.pNext )
  19375. && ( timeDomain == rhs.timeDomain );
  19376. }
  19377. bool operator!=( CalibratedTimestampInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  19378. {
  19379. return !operator==( rhs );
  19380. }
  19381. #endif
  19382. public:
  19383. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCalibratedTimestampInfoEXT;
  19384. const void* pNext = {};
  19385. VULKAN_HPP_NAMESPACE::TimeDomainEXT timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice;
  19386. };
  19387. static_assert( sizeof( CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ), "struct and wrapper have different size!" );
  19388. static_assert( std::is_standard_layout<CalibratedTimestampInfoEXT>::value, "struct wrapper is not a standard layout!" );
  19389. template <>
  19390. struct CppType<StructureType, StructureType::eCalibratedTimestampInfoEXT>
  19391. {
  19392. using Type = CalibratedTimestampInfoEXT;
  19393. };
  19394. struct CheckpointDataNV
  19395. {
  19396. static const bool allowDuplicate = false;
  19397. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCheckpointDataNV;
  19398. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19399. VULKAN_HPP_CONSTEXPR CheckpointDataNV(VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe, void* pCheckpointMarker_ = {}) VULKAN_HPP_NOEXCEPT
  19400. : stage( stage_ ), pCheckpointMarker( pCheckpointMarker_ )
  19401. {}
  19402. VULKAN_HPP_CONSTEXPR CheckpointDataNV( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19403. CheckpointDataNV( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
  19404. : CheckpointDataNV( *reinterpret_cast<CheckpointDataNV const *>( &rhs ) )
  19405. {}
  19406. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19407. VULKAN_HPP_CONSTEXPR_14 CheckpointDataNV & operator=( CheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19408. CheckpointDataNV & operator=( VkCheckpointDataNV const & rhs ) VULKAN_HPP_NOEXCEPT
  19409. {
  19410. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CheckpointDataNV const *>( &rhs );
  19411. return *this;
  19412. }
  19413. operator VkCheckpointDataNV const&() const VULKAN_HPP_NOEXCEPT
  19414. {
  19415. return *reinterpret_cast<const VkCheckpointDataNV*>( this );
  19416. }
  19417. operator VkCheckpointDataNV &() VULKAN_HPP_NOEXCEPT
  19418. {
  19419. return *reinterpret_cast<VkCheckpointDataNV*>( this );
  19420. }
  19421. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  19422. auto operator<=>( CheckpointDataNV const& ) const = default;
  19423. #else
  19424. bool operator==( CheckpointDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  19425. {
  19426. return ( sType == rhs.sType )
  19427. && ( pNext == rhs.pNext )
  19428. && ( stage == rhs.stage )
  19429. && ( pCheckpointMarker == rhs.pCheckpointMarker );
  19430. }
  19431. bool operator!=( CheckpointDataNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  19432. {
  19433. return !operator==( rhs );
  19434. }
  19435. #endif
  19436. public:
  19437. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCheckpointDataNV;
  19438. void* pNext = {};
  19439. VULKAN_HPP_NAMESPACE::PipelineStageFlagBits stage = VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eTopOfPipe;
  19440. void* pCheckpointMarker = {};
  19441. };
  19442. static_assert( sizeof( CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), "struct and wrapper have different size!" );
  19443. static_assert( std::is_standard_layout<CheckpointDataNV>::value, "struct wrapper is not a standard layout!" );
  19444. template <>
  19445. struct CppType<StructureType, StructureType::eCheckpointDataNV>
  19446. {
  19447. using Type = CheckpointDataNV;
  19448. };
  19449. union ClearColorValue
  19450. {
  19451. ClearColorValue( VULKAN_HPP_NAMESPACE::ClearColorValue const& rhs ) VULKAN_HPP_NOEXCEPT
  19452. {
  19453. memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) );
  19454. }
  19455. ClearColorValue( const std::array<float,4>& float32_ = {} )
  19456. : float32( float32_ )
  19457. {}
  19458. ClearColorValue( const std::array<int32_t,4>& int32_ )
  19459. : int32( int32_ )
  19460. {}
  19461. ClearColorValue( const std::array<uint32_t,4>& uint32_ )
  19462. : uint32( uint32_ )
  19463. {}
  19464. ClearColorValue & setFloat32( std::array<float,4> float32_ ) VULKAN_HPP_NOEXCEPT
  19465. {
  19466. float32 = float32_;
  19467. return *this;
  19468. }
  19469. ClearColorValue & setInt32( std::array<int32_t,4> int32_ ) VULKAN_HPP_NOEXCEPT
  19470. {
  19471. int32 = int32_;
  19472. return *this;
  19473. }
  19474. ClearColorValue & setUint32( std::array<uint32_t,4> uint32_ ) VULKAN_HPP_NOEXCEPT
  19475. {
  19476. uint32 = uint32_;
  19477. return *this;
  19478. }
  19479. VULKAN_HPP_NAMESPACE::ClearColorValue & operator=( VULKAN_HPP_NAMESPACE::ClearColorValue const & rhs ) VULKAN_HPP_NOEXCEPT
  19480. {
  19481. memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) );
  19482. return *this;
  19483. }
  19484. operator VkClearColorValue const&() const
  19485. {
  19486. return *reinterpret_cast<const VkClearColorValue*>(this);
  19487. }
  19488. operator VkClearColorValue &()
  19489. {
  19490. return *reinterpret_cast<VkClearColorValue*>(this);
  19491. }
  19492. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> float32;
  19493. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, 4> int32;
  19494. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 4> uint32;
  19495. };
  19496. struct ClearDepthStencilValue
  19497. {
  19498. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19499. VULKAN_HPP_CONSTEXPR ClearDepthStencilValue(float depth_ = {}, uint32_t stencil_ = {}) VULKAN_HPP_NOEXCEPT
  19500. : depth( depth_ ), stencil( stencil_ )
  19501. {}
  19502. VULKAN_HPP_CONSTEXPR ClearDepthStencilValue( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19503. ClearDepthStencilValue( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
  19504. : ClearDepthStencilValue( *reinterpret_cast<ClearDepthStencilValue const *>( &rhs ) )
  19505. {}
  19506. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19507. VULKAN_HPP_CONSTEXPR_14 ClearDepthStencilValue & operator=( ClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19508. ClearDepthStencilValue & operator=( VkClearDepthStencilValue const & rhs ) VULKAN_HPP_NOEXCEPT
  19509. {
  19510. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const *>( &rhs );
  19511. return *this;
  19512. }
  19513. ClearDepthStencilValue & setDepth( float depth_ ) VULKAN_HPP_NOEXCEPT
  19514. {
  19515. depth = depth_;
  19516. return *this;
  19517. }
  19518. ClearDepthStencilValue & setStencil( uint32_t stencil_ ) VULKAN_HPP_NOEXCEPT
  19519. {
  19520. stencil = stencil_;
  19521. return *this;
  19522. }
  19523. operator VkClearDepthStencilValue const&() const VULKAN_HPP_NOEXCEPT
  19524. {
  19525. return *reinterpret_cast<const VkClearDepthStencilValue*>( this );
  19526. }
  19527. operator VkClearDepthStencilValue &() VULKAN_HPP_NOEXCEPT
  19528. {
  19529. return *reinterpret_cast<VkClearDepthStencilValue*>( this );
  19530. }
  19531. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  19532. auto operator<=>( ClearDepthStencilValue const& ) const = default;
  19533. #else
  19534. bool operator==( ClearDepthStencilValue const& rhs ) const VULKAN_HPP_NOEXCEPT
  19535. {
  19536. return ( depth == rhs.depth )
  19537. && ( stencil == rhs.stencil );
  19538. }
  19539. bool operator!=( ClearDepthStencilValue const& rhs ) const VULKAN_HPP_NOEXCEPT
  19540. {
  19541. return !operator==( rhs );
  19542. }
  19543. #endif
  19544. public:
  19545. float depth = {};
  19546. uint32_t stencil = {};
  19547. };
  19548. static_assert( sizeof( ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ), "struct and wrapper have different size!" );
  19549. static_assert( std::is_standard_layout<ClearDepthStencilValue>::value, "struct wrapper is not a standard layout!" );
  19550. union ClearValue
  19551. {
  19552. ClearValue( VULKAN_HPP_NAMESPACE::ClearValue const& rhs ) VULKAN_HPP_NOEXCEPT
  19553. {
  19554. memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) );
  19555. }
  19556. ClearValue( VULKAN_HPP_NAMESPACE::ClearColorValue color_ = {} )
  19557. : color( color_ )
  19558. {}
  19559. ClearValue( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil_ )
  19560. : depthStencil( depthStencil_ )
  19561. {}
  19562. ClearValue & setColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & color_ ) VULKAN_HPP_NOEXCEPT
  19563. {
  19564. color = color_;
  19565. return *this;
  19566. }
  19567. ClearValue & setDepthStencil( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue const & depthStencil_ ) VULKAN_HPP_NOEXCEPT
  19568. {
  19569. depthStencil = depthStencil_;
  19570. return *this;
  19571. }
  19572. VULKAN_HPP_NAMESPACE::ClearValue & operator=( VULKAN_HPP_NAMESPACE::ClearValue const & rhs ) VULKAN_HPP_NOEXCEPT
  19573. {
  19574. memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) );
  19575. return *this;
  19576. }
  19577. operator VkClearValue const&() const
  19578. {
  19579. return *reinterpret_cast<const VkClearValue*>(this);
  19580. }
  19581. operator VkClearValue &()
  19582. {
  19583. return *reinterpret_cast<VkClearValue*>(this);
  19584. }
  19585. #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
  19586. VULKAN_HPP_NAMESPACE::ClearColorValue color;
  19587. VULKAN_HPP_NAMESPACE::ClearDepthStencilValue depthStencil;
  19588. #else
  19589. VkClearColorValue color;
  19590. VkClearDepthStencilValue depthStencil;
  19591. #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  19592. };
  19593. struct ClearAttachment
  19594. {
  19595. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19596. ClearAttachment(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t colorAttachment_ = {}, VULKAN_HPP_NAMESPACE::ClearValue clearValue_ = {}) VULKAN_HPP_NOEXCEPT
  19597. : aspectMask( aspectMask_ ), colorAttachment( colorAttachment_ ), clearValue( clearValue_ )
  19598. {}
  19599. ClearAttachment( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19600. ClearAttachment( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
  19601. : ClearAttachment( *reinterpret_cast<ClearAttachment const *>( &rhs ) )
  19602. {}
  19603. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19604. ClearAttachment & operator=( ClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19605. ClearAttachment & operator=( VkClearAttachment const & rhs ) VULKAN_HPP_NOEXCEPT
  19606. {
  19607. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearAttachment const *>( &rhs );
  19608. return *this;
  19609. }
  19610. ClearAttachment & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
  19611. {
  19612. aspectMask = aspectMask_;
  19613. return *this;
  19614. }
  19615. ClearAttachment & setColorAttachment( uint32_t colorAttachment_ ) VULKAN_HPP_NOEXCEPT
  19616. {
  19617. colorAttachment = colorAttachment_;
  19618. return *this;
  19619. }
  19620. ClearAttachment & setClearValue( VULKAN_HPP_NAMESPACE::ClearValue const & clearValue_ ) VULKAN_HPP_NOEXCEPT
  19621. {
  19622. clearValue = clearValue_;
  19623. return *this;
  19624. }
  19625. operator VkClearAttachment const&() const VULKAN_HPP_NOEXCEPT
  19626. {
  19627. return *reinterpret_cast<const VkClearAttachment*>( this );
  19628. }
  19629. operator VkClearAttachment &() VULKAN_HPP_NOEXCEPT
  19630. {
  19631. return *reinterpret_cast<VkClearAttachment*>( this );
  19632. }
  19633. public:
  19634. VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
  19635. uint32_t colorAttachment = {};
  19636. VULKAN_HPP_NAMESPACE::ClearValue clearValue = {};
  19637. };
  19638. static_assert( sizeof( ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" );
  19639. static_assert( std::is_standard_layout<ClearAttachment>::value, "struct wrapper is not a standard layout!" );
  19640. struct ClearRect
  19641. {
  19642. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19643. VULKAN_HPP_CONSTEXPR ClearRect(VULKAN_HPP_NAMESPACE::Rect2D rect_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
  19644. : rect( rect_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ )
  19645. {}
  19646. VULKAN_HPP_CONSTEXPR ClearRect( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19647. ClearRect( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
  19648. : ClearRect( *reinterpret_cast<ClearRect const *>( &rhs ) )
  19649. {}
  19650. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19651. VULKAN_HPP_CONSTEXPR_14 ClearRect & operator=( ClearRect const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19652. ClearRect & operator=( VkClearRect const & rhs ) VULKAN_HPP_NOEXCEPT
  19653. {
  19654. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ClearRect const *>( &rhs );
  19655. return *this;
  19656. }
  19657. ClearRect & setRect( VULKAN_HPP_NAMESPACE::Rect2D const & rect_ ) VULKAN_HPP_NOEXCEPT
  19658. {
  19659. rect = rect_;
  19660. return *this;
  19661. }
  19662. ClearRect & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
  19663. {
  19664. baseArrayLayer = baseArrayLayer_;
  19665. return *this;
  19666. }
  19667. ClearRect & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
  19668. {
  19669. layerCount = layerCount_;
  19670. return *this;
  19671. }
  19672. operator VkClearRect const&() const VULKAN_HPP_NOEXCEPT
  19673. {
  19674. return *reinterpret_cast<const VkClearRect*>( this );
  19675. }
  19676. operator VkClearRect &() VULKAN_HPP_NOEXCEPT
  19677. {
  19678. return *reinterpret_cast<VkClearRect*>( this );
  19679. }
  19680. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  19681. auto operator<=>( ClearRect const& ) const = default;
  19682. #else
  19683. bool operator==( ClearRect const& rhs ) const VULKAN_HPP_NOEXCEPT
  19684. {
  19685. return ( rect == rhs.rect )
  19686. && ( baseArrayLayer == rhs.baseArrayLayer )
  19687. && ( layerCount == rhs.layerCount );
  19688. }
  19689. bool operator!=( ClearRect const& rhs ) const VULKAN_HPP_NOEXCEPT
  19690. {
  19691. return !operator==( rhs );
  19692. }
  19693. #endif
  19694. public:
  19695. VULKAN_HPP_NAMESPACE::Rect2D rect = {};
  19696. uint32_t baseArrayLayer = {};
  19697. uint32_t layerCount = {};
  19698. };
  19699. static_assert( sizeof( ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" );
  19700. static_assert( std::is_standard_layout<ClearRect>::value, "struct wrapper is not a standard layout!" );
  19701. struct CoarseSampleLocationNV
  19702. {
  19703. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19704. VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV(uint32_t pixelX_ = {}, uint32_t pixelY_ = {}, uint32_t sample_ = {}) VULKAN_HPP_NOEXCEPT
  19705. : pixelX( pixelX_ ), pixelY( pixelY_ ), sample( sample_ )
  19706. {}
  19707. VULKAN_HPP_CONSTEXPR CoarseSampleLocationNV( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19708. CoarseSampleLocationNV( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
  19709. : CoarseSampleLocationNV( *reinterpret_cast<CoarseSampleLocationNV const *>( &rhs ) )
  19710. {}
  19711. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19712. VULKAN_HPP_CONSTEXPR_14 CoarseSampleLocationNV & operator=( CoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19713. CoarseSampleLocationNV & operator=( VkCoarseSampleLocationNV const & rhs ) VULKAN_HPP_NOEXCEPT
  19714. {
  19715. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV const *>( &rhs );
  19716. return *this;
  19717. }
  19718. CoarseSampleLocationNV & setPixelX( uint32_t pixelX_ ) VULKAN_HPP_NOEXCEPT
  19719. {
  19720. pixelX = pixelX_;
  19721. return *this;
  19722. }
  19723. CoarseSampleLocationNV & setPixelY( uint32_t pixelY_ ) VULKAN_HPP_NOEXCEPT
  19724. {
  19725. pixelY = pixelY_;
  19726. return *this;
  19727. }
  19728. CoarseSampleLocationNV & setSample( uint32_t sample_ ) VULKAN_HPP_NOEXCEPT
  19729. {
  19730. sample = sample_;
  19731. return *this;
  19732. }
  19733. operator VkCoarseSampleLocationNV const&() const VULKAN_HPP_NOEXCEPT
  19734. {
  19735. return *reinterpret_cast<const VkCoarseSampleLocationNV*>( this );
  19736. }
  19737. operator VkCoarseSampleLocationNV &() VULKAN_HPP_NOEXCEPT
  19738. {
  19739. return *reinterpret_cast<VkCoarseSampleLocationNV*>( this );
  19740. }
  19741. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  19742. auto operator<=>( CoarseSampleLocationNV const& ) const = default;
  19743. #else
  19744. bool operator==( CoarseSampleLocationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  19745. {
  19746. return ( pixelX == rhs.pixelX )
  19747. && ( pixelY == rhs.pixelY )
  19748. && ( sample == rhs.sample );
  19749. }
  19750. bool operator!=( CoarseSampleLocationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  19751. {
  19752. return !operator==( rhs );
  19753. }
  19754. #endif
  19755. public:
  19756. uint32_t pixelX = {};
  19757. uint32_t pixelY = {};
  19758. uint32_t sample = {};
  19759. };
  19760. static_assert( sizeof( CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ), "struct and wrapper have different size!" );
  19761. static_assert( std::is_standard_layout<CoarseSampleLocationNV>::value, "struct wrapper is not a standard layout!" );
  19762. struct CoarseSampleOrderCustomNV
  19763. {
  19764. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19765. VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV(VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations, uint32_t sampleCount_ = {}, uint32_t sampleLocationCount_ = {}, const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT
  19766. : shadingRate( shadingRate_ ), sampleCount( sampleCount_ ), sampleLocationCount( sampleLocationCount_ ), pSampleLocations( pSampleLocations_ )
  19767. {}
  19768. VULKAN_HPP_CONSTEXPR CoarseSampleOrderCustomNV( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19769. CoarseSampleOrderCustomNV( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
  19770. : CoarseSampleOrderCustomNV( *reinterpret_cast<CoarseSampleOrderCustomNV const *>( &rhs ) )
  19771. {}
  19772. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  19773. CoarseSampleOrderCustomNV( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_, uint32_t sampleCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV> const & sampleLocations_ )
  19774. : shadingRate( shadingRate_ ), sampleCount( sampleCount_ ), sampleLocationCount( static_cast<uint32_t>( sampleLocations_.size() ) ), pSampleLocations( sampleLocations_.data() )
  19775. {}
  19776. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  19777. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19778. VULKAN_HPP_CONSTEXPR_14 CoarseSampleOrderCustomNV & operator=( CoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19779. CoarseSampleOrderCustomNV & operator=( VkCoarseSampleOrderCustomNV const & rhs ) VULKAN_HPP_NOEXCEPT
  19780. {
  19781. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV const *>( &rhs );
  19782. return *this;
  19783. }
  19784. CoarseSampleOrderCustomNV & setShadingRate( VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
  19785. {
  19786. shadingRate = shadingRate_;
  19787. return *this;
  19788. }
  19789. CoarseSampleOrderCustomNV & setSampleCount( uint32_t sampleCount_ ) VULKAN_HPP_NOEXCEPT
  19790. {
  19791. sampleCount = sampleCount_;
  19792. return *this;
  19793. }
  19794. CoarseSampleOrderCustomNV & setSampleLocationCount( uint32_t sampleLocationCount_ ) VULKAN_HPP_NOEXCEPT
  19795. {
  19796. sampleLocationCount = sampleLocationCount_;
  19797. return *this;
  19798. }
  19799. CoarseSampleOrderCustomNV & setPSampleLocations( const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations_ ) VULKAN_HPP_NOEXCEPT
  19800. {
  19801. pSampleLocations = pSampleLocations_;
  19802. return *this;
  19803. }
  19804. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  19805. CoarseSampleOrderCustomNV & setSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV> const & sampleLocations_ ) VULKAN_HPP_NOEXCEPT
  19806. {
  19807. sampleLocationCount = static_cast<uint32_t>( sampleLocations_.size() );
  19808. pSampleLocations = sampleLocations_.data();
  19809. return *this;
  19810. }
  19811. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  19812. operator VkCoarseSampleOrderCustomNV const&() const VULKAN_HPP_NOEXCEPT
  19813. {
  19814. return *reinterpret_cast<const VkCoarseSampleOrderCustomNV*>( this );
  19815. }
  19816. operator VkCoarseSampleOrderCustomNV &() VULKAN_HPP_NOEXCEPT
  19817. {
  19818. return *reinterpret_cast<VkCoarseSampleOrderCustomNV*>( this );
  19819. }
  19820. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  19821. auto operator<=>( CoarseSampleOrderCustomNV const& ) const = default;
  19822. #else
  19823. bool operator==( CoarseSampleOrderCustomNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  19824. {
  19825. return ( shadingRate == rhs.shadingRate )
  19826. && ( sampleCount == rhs.sampleCount )
  19827. && ( sampleLocationCount == rhs.sampleLocationCount )
  19828. && ( pSampleLocations == rhs.pSampleLocations );
  19829. }
  19830. bool operator!=( CoarseSampleOrderCustomNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  19831. {
  19832. return !operator==( rhs );
  19833. }
  19834. #endif
  19835. public:
  19836. VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV shadingRate = VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV::eNoInvocations;
  19837. uint32_t sampleCount = {};
  19838. uint32_t sampleLocationCount = {};
  19839. const VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV* pSampleLocations = {};
  19840. };
  19841. static_assert( sizeof( CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ), "struct and wrapper have different size!" );
  19842. static_assert( std::is_standard_layout<CoarseSampleOrderCustomNV>::value, "struct wrapper is not a standard layout!" );
  19843. class CommandPool
  19844. {
  19845. public:
  19846. using CType = VkCommandPool;
  19847. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool;
  19848. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool;
  19849. public:
  19850. VULKAN_HPP_CONSTEXPR CommandPool() VULKAN_HPP_NOEXCEPT
  19851. : m_commandPool(VK_NULL_HANDLE)
  19852. {}
  19853. VULKAN_HPP_CONSTEXPR CommandPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  19854. : m_commandPool(VK_NULL_HANDLE)
  19855. {}
  19856. VULKAN_HPP_TYPESAFE_EXPLICIT CommandPool( VkCommandPool commandPool ) VULKAN_HPP_NOEXCEPT
  19857. : m_commandPool( commandPool )
  19858. {}
  19859. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  19860. CommandPool & operator=(VkCommandPool commandPool) VULKAN_HPP_NOEXCEPT
  19861. {
  19862. m_commandPool = commandPool;
  19863. return *this;
  19864. }
  19865. #endif
  19866. CommandPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  19867. {
  19868. m_commandPool = VK_NULL_HANDLE;
  19869. return *this;
  19870. }
  19871. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  19872. auto operator<=>( CommandPool const& ) const = default;
  19873. #else
  19874. bool operator==( CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
  19875. {
  19876. return m_commandPool == rhs.m_commandPool;
  19877. }
  19878. bool operator!=(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
  19879. {
  19880. return m_commandPool != rhs.m_commandPool;
  19881. }
  19882. bool operator<(CommandPool const & rhs ) const VULKAN_HPP_NOEXCEPT
  19883. {
  19884. return m_commandPool < rhs.m_commandPool;
  19885. }
  19886. #endif
  19887. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandPool() const VULKAN_HPP_NOEXCEPT
  19888. {
  19889. return m_commandPool;
  19890. }
  19891. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  19892. {
  19893. return m_commandPool != VK_NULL_HANDLE;
  19894. }
  19895. bool operator!() const VULKAN_HPP_NOEXCEPT
  19896. {
  19897. return m_commandPool == VK_NULL_HANDLE;
  19898. }
  19899. private:
  19900. VkCommandPool m_commandPool;
  19901. };
  19902. static_assert( sizeof( VULKAN_HPP_NAMESPACE::CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" );
  19903. template <>
  19904. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eCommandPool>
  19905. {
  19906. using type = VULKAN_HPP_NAMESPACE::CommandPool;
  19907. };
  19908. template <>
  19909. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCommandPool>
  19910. {
  19911. using Type = VULKAN_HPP_NAMESPACE::CommandPool;
  19912. };
  19913. template <>
  19914. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandPool>
  19915. {
  19916. using Type = VULKAN_HPP_NAMESPACE::CommandPool;
  19917. };
  19918. template <>
  19919. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CommandPool>
  19920. {
  19921. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  19922. };
  19923. struct CommandBufferAllocateInfo
  19924. {
  19925. static const bool allowDuplicate = false;
  19926. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferAllocateInfo;
  19927. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19928. VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo(VULKAN_HPP_NAMESPACE::CommandPool commandPool_ = {}, VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary, uint32_t commandBufferCount_ = {}) VULKAN_HPP_NOEXCEPT
  19929. : commandPool( commandPool_ ), level( level_ ), commandBufferCount( commandBufferCount_ )
  19930. {}
  19931. VULKAN_HPP_CONSTEXPR CommandBufferAllocateInfo( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19932. CommandBufferAllocateInfo( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  19933. : CommandBufferAllocateInfo( *reinterpret_cast<CommandBufferAllocateInfo const *>( &rhs ) )
  19934. {}
  19935. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  19936. VULKAN_HPP_CONSTEXPR_14 CommandBufferAllocateInfo & operator=( CommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  19937. CommandBufferAllocateInfo & operator=( VkCommandBufferAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  19938. {
  19939. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo const *>( &rhs );
  19940. return *this;
  19941. }
  19942. CommandBufferAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  19943. {
  19944. pNext = pNext_;
  19945. return *this;
  19946. }
  19947. CommandBufferAllocateInfo & setCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool_ ) VULKAN_HPP_NOEXCEPT
  19948. {
  19949. commandPool = commandPool_;
  19950. return *this;
  19951. }
  19952. CommandBufferAllocateInfo & setLevel( VULKAN_HPP_NAMESPACE::CommandBufferLevel level_ ) VULKAN_HPP_NOEXCEPT
  19953. {
  19954. level = level_;
  19955. return *this;
  19956. }
  19957. CommandBufferAllocateInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
  19958. {
  19959. commandBufferCount = commandBufferCount_;
  19960. return *this;
  19961. }
  19962. operator VkCommandBufferAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
  19963. {
  19964. return *reinterpret_cast<const VkCommandBufferAllocateInfo*>( this );
  19965. }
  19966. operator VkCommandBufferAllocateInfo &() VULKAN_HPP_NOEXCEPT
  19967. {
  19968. return *reinterpret_cast<VkCommandBufferAllocateInfo*>( this );
  19969. }
  19970. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  19971. auto operator<=>( CommandBufferAllocateInfo const& ) const = default;
  19972. #else
  19973. bool operator==( CommandBufferAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  19974. {
  19975. return ( sType == rhs.sType )
  19976. && ( pNext == rhs.pNext )
  19977. && ( commandPool == rhs.commandPool )
  19978. && ( level == rhs.level )
  19979. && ( commandBufferCount == rhs.commandBufferCount );
  19980. }
  19981. bool operator!=( CommandBufferAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  19982. {
  19983. return !operator==( rhs );
  19984. }
  19985. #endif
  19986. public:
  19987. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferAllocateInfo;
  19988. const void* pNext = {};
  19989. VULKAN_HPP_NAMESPACE::CommandPool commandPool = {};
  19990. VULKAN_HPP_NAMESPACE::CommandBufferLevel level = VULKAN_HPP_NAMESPACE::CommandBufferLevel::ePrimary;
  19991. uint32_t commandBufferCount = {};
  19992. };
  19993. static_assert( sizeof( CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ), "struct and wrapper have different size!" );
  19994. static_assert( std::is_standard_layout<CommandBufferAllocateInfo>::value, "struct wrapper is not a standard layout!" );
  19995. template <>
  19996. struct CppType<StructureType, StructureType::eCommandBufferAllocateInfo>
  19997. {
  19998. using Type = CommandBufferAllocateInfo;
  19999. };
  20000. class RenderPass
  20001. {
  20002. public:
  20003. using CType = VkRenderPass;
  20004. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass;
  20005. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass;
  20006. public:
  20007. VULKAN_HPP_CONSTEXPR RenderPass() VULKAN_HPP_NOEXCEPT
  20008. : m_renderPass(VK_NULL_HANDLE)
  20009. {}
  20010. VULKAN_HPP_CONSTEXPR RenderPass( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  20011. : m_renderPass(VK_NULL_HANDLE)
  20012. {}
  20013. VULKAN_HPP_TYPESAFE_EXPLICIT RenderPass( VkRenderPass renderPass ) VULKAN_HPP_NOEXCEPT
  20014. : m_renderPass( renderPass )
  20015. {}
  20016. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  20017. RenderPass & operator=(VkRenderPass renderPass) VULKAN_HPP_NOEXCEPT
  20018. {
  20019. m_renderPass = renderPass;
  20020. return *this;
  20021. }
  20022. #endif
  20023. RenderPass & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  20024. {
  20025. m_renderPass = VK_NULL_HANDLE;
  20026. return *this;
  20027. }
  20028. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  20029. auto operator<=>( RenderPass const& ) const = default;
  20030. #else
  20031. bool operator==( RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
  20032. {
  20033. return m_renderPass == rhs.m_renderPass;
  20034. }
  20035. bool operator!=(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
  20036. {
  20037. return m_renderPass != rhs.m_renderPass;
  20038. }
  20039. bool operator<(RenderPass const & rhs ) const VULKAN_HPP_NOEXCEPT
  20040. {
  20041. return m_renderPass < rhs.m_renderPass;
  20042. }
  20043. #endif
  20044. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkRenderPass() const VULKAN_HPP_NOEXCEPT
  20045. {
  20046. return m_renderPass;
  20047. }
  20048. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  20049. {
  20050. return m_renderPass != VK_NULL_HANDLE;
  20051. }
  20052. bool operator!() const VULKAN_HPP_NOEXCEPT
  20053. {
  20054. return m_renderPass == VK_NULL_HANDLE;
  20055. }
  20056. private:
  20057. VkRenderPass m_renderPass;
  20058. };
  20059. static_assert( sizeof( VULKAN_HPP_NAMESPACE::RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" );
  20060. template <>
  20061. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eRenderPass>
  20062. {
  20063. using type = VULKAN_HPP_NAMESPACE::RenderPass;
  20064. };
  20065. template <>
  20066. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eRenderPass>
  20067. {
  20068. using Type = VULKAN_HPP_NAMESPACE::RenderPass;
  20069. };
  20070. template <>
  20071. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eRenderPass>
  20072. {
  20073. using Type = VULKAN_HPP_NAMESPACE::RenderPass;
  20074. };
  20075. template <>
  20076. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::RenderPass>
  20077. {
  20078. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  20079. };
  20080. class Framebuffer
  20081. {
  20082. public:
  20083. using CType = VkFramebuffer;
  20084. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer;
  20085. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer;
  20086. public:
  20087. VULKAN_HPP_CONSTEXPR Framebuffer() VULKAN_HPP_NOEXCEPT
  20088. : m_framebuffer(VK_NULL_HANDLE)
  20089. {}
  20090. VULKAN_HPP_CONSTEXPR Framebuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  20091. : m_framebuffer(VK_NULL_HANDLE)
  20092. {}
  20093. VULKAN_HPP_TYPESAFE_EXPLICIT Framebuffer( VkFramebuffer framebuffer ) VULKAN_HPP_NOEXCEPT
  20094. : m_framebuffer( framebuffer )
  20095. {}
  20096. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  20097. Framebuffer & operator=(VkFramebuffer framebuffer) VULKAN_HPP_NOEXCEPT
  20098. {
  20099. m_framebuffer = framebuffer;
  20100. return *this;
  20101. }
  20102. #endif
  20103. Framebuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  20104. {
  20105. m_framebuffer = VK_NULL_HANDLE;
  20106. return *this;
  20107. }
  20108. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  20109. auto operator<=>( Framebuffer const& ) const = default;
  20110. #else
  20111. bool operator==( Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
  20112. {
  20113. return m_framebuffer == rhs.m_framebuffer;
  20114. }
  20115. bool operator!=(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
  20116. {
  20117. return m_framebuffer != rhs.m_framebuffer;
  20118. }
  20119. bool operator<(Framebuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
  20120. {
  20121. return m_framebuffer < rhs.m_framebuffer;
  20122. }
  20123. #endif
  20124. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkFramebuffer() const VULKAN_HPP_NOEXCEPT
  20125. {
  20126. return m_framebuffer;
  20127. }
  20128. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  20129. {
  20130. return m_framebuffer != VK_NULL_HANDLE;
  20131. }
  20132. bool operator!() const VULKAN_HPP_NOEXCEPT
  20133. {
  20134. return m_framebuffer == VK_NULL_HANDLE;
  20135. }
  20136. private:
  20137. VkFramebuffer m_framebuffer;
  20138. };
  20139. static_assert( sizeof( VULKAN_HPP_NAMESPACE::Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" );
  20140. template <>
  20141. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eFramebuffer>
  20142. {
  20143. using type = VULKAN_HPP_NAMESPACE::Framebuffer;
  20144. };
  20145. template <>
  20146. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eFramebuffer>
  20147. {
  20148. using Type = VULKAN_HPP_NAMESPACE::Framebuffer;
  20149. };
  20150. template <>
  20151. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eFramebuffer>
  20152. {
  20153. using Type = VULKAN_HPP_NAMESPACE::Framebuffer;
  20154. };
  20155. template <>
  20156. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Framebuffer>
  20157. {
  20158. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  20159. };
  20160. struct CommandBufferInheritanceInfo
  20161. {
  20162. static const bool allowDuplicate = false;
  20163. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceInfo;
  20164. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  20165. VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ = {}, VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ = {}, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}) VULKAN_HPP_NOEXCEPT
  20166. : renderPass( renderPass_ ), subpass( subpass_ ), framebuffer( framebuffer_ ), occlusionQueryEnable( occlusionQueryEnable_ ), queryFlags( queryFlags_ ), pipelineStatistics( pipelineStatistics_ )
  20167. {}
  20168. VULKAN_HPP_CONSTEXPR CommandBufferInheritanceInfo( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  20169. CommandBufferInheritanceInfo( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  20170. : CommandBufferInheritanceInfo( *reinterpret_cast<CommandBufferInheritanceInfo const *>( &rhs ) )
  20171. {}
  20172. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  20173. VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceInfo & operator=( CommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  20174. CommandBufferInheritanceInfo & operator=( VkCommandBufferInheritanceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  20175. {
  20176. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo const *>( &rhs );
  20177. return *this;
  20178. }
  20179. CommandBufferInheritanceInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  20180. {
  20181. pNext = pNext_;
  20182. return *this;
  20183. }
  20184. CommandBufferInheritanceInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
  20185. {
  20186. renderPass = renderPass_;
  20187. return *this;
  20188. }
  20189. CommandBufferInheritanceInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
  20190. {
  20191. subpass = subpass_;
  20192. return *this;
  20193. }
  20194. CommandBufferInheritanceInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
  20195. {
  20196. framebuffer = framebuffer_;
  20197. return *this;
  20198. }
  20199. CommandBufferInheritanceInfo & setOcclusionQueryEnable( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable_ ) VULKAN_HPP_NOEXCEPT
  20200. {
  20201. occlusionQueryEnable = occlusionQueryEnable_;
  20202. return *this;
  20203. }
  20204. CommandBufferInheritanceInfo & setQueryFlags( VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags_ ) VULKAN_HPP_NOEXCEPT
  20205. {
  20206. queryFlags = queryFlags_;
  20207. return *this;
  20208. }
  20209. CommandBufferInheritanceInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
  20210. {
  20211. pipelineStatistics = pipelineStatistics_;
  20212. return *this;
  20213. }
  20214. operator VkCommandBufferInheritanceInfo const&() const VULKAN_HPP_NOEXCEPT
  20215. {
  20216. return *reinterpret_cast<const VkCommandBufferInheritanceInfo*>( this );
  20217. }
  20218. operator VkCommandBufferInheritanceInfo &() VULKAN_HPP_NOEXCEPT
  20219. {
  20220. return *reinterpret_cast<VkCommandBufferInheritanceInfo*>( this );
  20221. }
  20222. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  20223. auto operator<=>( CommandBufferInheritanceInfo const& ) const = default;
  20224. #else
  20225. bool operator==( CommandBufferInheritanceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  20226. {
  20227. return ( sType == rhs.sType )
  20228. && ( pNext == rhs.pNext )
  20229. && ( renderPass == rhs.renderPass )
  20230. && ( subpass == rhs.subpass )
  20231. && ( framebuffer == rhs.framebuffer )
  20232. && ( occlusionQueryEnable == rhs.occlusionQueryEnable )
  20233. && ( queryFlags == rhs.queryFlags )
  20234. && ( pipelineStatistics == rhs.pipelineStatistics );
  20235. }
  20236. bool operator!=( CommandBufferInheritanceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  20237. {
  20238. return !operator==( rhs );
  20239. }
  20240. #endif
  20241. public:
  20242. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceInfo;
  20243. const void* pNext = {};
  20244. VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
  20245. uint32_t subpass = {};
  20246. VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
  20247. VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryEnable = {};
  20248. VULKAN_HPP_NAMESPACE::QueryControlFlags queryFlags = {};
  20249. VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
  20250. };
  20251. static_assert( sizeof( CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ), "struct and wrapper have different size!" );
  20252. static_assert( std::is_standard_layout<CommandBufferInheritanceInfo>::value, "struct wrapper is not a standard layout!" );
  20253. template <>
  20254. struct CppType<StructureType, StructureType::eCommandBufferInheritanceInfo>
  20255. {
  20256. using Type = CommandBufferInheritanceInfo;
  20257. };
  20258. struct CommandBufferBeginInfo
  20259. {
  20260. static const bool allowDuplicate = false;
  20261. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferBeginInfo;
  20262. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  20263. VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo(VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ = {}, const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo_ = {}) VULKAN_HPP_NOEXCEPT
  20264. : flags( flags_ ), pInheritanceInfo( pInheritanceInfo_ )
  20265. {}
  20266. VULKAN_HPP_CONSTEXPR CommandBufferBeginInfo( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  20267. CommandBufferBeginInfo( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  20268. : CommandBufferBeginInfo( *reinterpret_cast<CommandBufferBeginInfo const *>( &rhs ) )
  20269. {}
  20270. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  20271. VULKAN_HPP_CONSTEXPR_14 CommandBufferBeginInfo & operator=( CommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  20272. CommandBufferBeginInfo & operator=( VkCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  20273. {
  20274. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo const *>( &rhs );
  20275. return *this;
  20276. }
  20277. CommandBufferBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  20278. {
  20279. pNext = pNext_;
  20280. return *this;
  20281. }
  20282. CommandBufferBeginInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags_ ) VULKAN_HPP_NOEXCEPT
  20283. {
  20284. flags = flags_;
  20285. return *this;
  20286. }
  20287. CommandBufferBeginInfo & setPInheritanceInfo( const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo_ ) VULKAN_HPP_NOEXCEPT
  20288. {
  20289. pInheritanceInfo = pInheritanceInfo_;
  20290. return *this;
  20291. }
  20292. operator VkCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT
  20293. {
  20294. return *reinterpret_cast<const VkCommandBufferBeginInfo*>( this );
  20295. }
  20296. operator VkCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
  20297. {
  20298. return *reinterpret_cast<VkCommandBufferBeginInfo*>( this );
  20299. }
  20300. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  20301. auto operator<=>( CommandBufferBeginInfo const& ) const = default;
  20302. #else
  20303. bool operator==( CommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  20304. {
  20305. return ( sType == rhs.sType )
  20306. && ( pNext == rhs.pNext )
  20307. && ( flags == rhs.flags )
  20308. && ( pInheritanceInfo == rhs.pInheritanceInfo );
  20309. }
  20310. bool operator!=( CommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  20311. {
  20312. return !operator==( rhs );
  20313. }
  20314. #endif
  20315. public:
  20316. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferBeginInfo;
  20317. const void* pNext = {};
  20318. VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags flags = {};
  20319. const VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo* pInheritanceInfo = {};
  20320. };
  20321. static_assert( sizeof( CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ), "struct and wrapper have different size!" );
  20322. static_assert( std::is_standard_layout<CommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
  20323. template <>
  20324. struct CppType<StructureType, StructureType::eCommandBufferBeginInfo>
  20325. {
  20326. using Type = CommandBufferBeginInfo;
  20327. };
  20328. struct CommandBufferInheritanceConditionalRenderingInfoEXT
  20329. {
  20330. static const bool allowDuplicate = false;
  20331. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
  20332. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  20333. VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ = {}) VULKAN_HPP_NOEXCEPT
  20334. : conditionalRenderingEnable( conditionalRenderingEnable_ )
  20335. {}
  20336. VULKAN_HPP_CONSTEXPR CommandBufferInheritanceConditionalRenderingInfoEXT( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  20337. CommandBufferInheritanceConditionalRenderingInfoEXT( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  20338. : CommandBufferInheritanceConditionalRenderingInfoEXT( *reinterpret_cast<CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs ) )
  20339. {}
  20340. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  20341. VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( CommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  20342. CommandBufferInheritanceConditionalRenderingInfoEXT & operator=( VkCommandBufferInheritanceConditionalRenderingInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  20343. {
  20344. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT const *>( &rhs );
  20345. return *this;
  20346. }
  20347. CommandBufferInheritanceConditionalRenderingInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  20348. {
  20349. pNext = pNext_;
  20350. return *this;
  20351. }
  20352. CommandBufferInheritanceConditionalRenderingInfoEXT & setConditionalRenderingEnable( VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable_ ) VULKAN_HPP_NOEXCEPT
  20353. {
  20354. conditionalRenderingEnable = conditionalRenderingEnable_;
  20355. return *this;
  20356. }
  20357. operator VkCommandBufferInheritanceConditionalRenderingInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  20358. {
  20359. return *reinterpret_cast<const VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
  20360. }
  20361. operator VkCommandBufferInheritanceConditionalRenderingInfoEXT &() VULKAN_HPP_NOEXCEPT
  20362. {
  20363. return *reinterpret_cast<VkCommandBufferInheritanceConditionalRenderingInfoEXT*>( this );
  20364. }
  20365. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  20366. auto operator<=>( CommandBufferInheritanceConditionalRenderingInfoEXT const& ) const = default;
  20367. #else
  20368. bool operator==( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  20369. {
  20370. return ( sType == rhs.sType )
  20371. && ( pNext == rhs.pNext )
  20372. && ( conditionalRenderingEnable == rhs.conditionalRenderingEnable );
  20373. }
  20374. bool operator!=( CommandBufferInheritanceConditionalRenderingInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  20375. {
  20376. return !operator==( rhs );
  20377. }
  20378. #endif
  20379. public:
  20380. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT;
  20381. const void* pNext = {};
  20382. VULKAN_HPP_NAMESPACE::Bool32 conditionalRenderingEnable = {};
  20383. };
  20384. static_assert( sizeof( CommandBufferInheritanceConditionalRenderingInfoEXT ) == sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ), "struct and wrapper have different size!" );
  20385. static_assert( std::is_standard_layout<CommandBufferInheritanceConditionalRenderingInfoEXT>::value, "struct wrapper is not a standard layout!" );
  20386. template <>
  20387. struct CppType<StructureType, StructureType::eCommandBufferInheritanceConditionalRenderingInfoEXT>
  20388. {
  20389. using Type = CommandBufferInheritanceConditionalRenderingInfoEXT;
  20390. };
  20391. struct CommandBufferInheritanceRenderPassTransformInfoQCOM
  20392. {
  20393. static const bool allowDuplicate = false;
  20394. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;
  20395. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  20396. VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}) VULKAN_HPP_NOEXCEPT
  20397. : transform( transform_ ), renderArea( renderArea_ )
  20398. {}
  20399. VULKAN_HPP_CONSTEXPR CommandBufferInheritanceRenderPassTransformInfoQCOM( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  20400. CommandBufferInheritanceRenderPassTransformInfoQCOM( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
  20401. : CommandBufferInheritanceRenderPassTransformInfoQCOM( *reinterpret_cast<CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs ) )
  20402. {}
  20403. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  20404. VULKAN_HPP_CONSTEXPR_14 CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( CommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  20405. CommandBufferInheritanceRenderPassTransformInfoQCOM & operator=( VkCommandBufferInheritanceRenderPassTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
  20406. {
  20407. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM const *>( &rhs );
  20408. return *this;
  20409. }
  20410. CommandBufferInheritanceRenderPassTransformInfoQCOM & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  20411. {
  20412. pNext = pNext_;
  20413. return *this;
  20414. }
  20415. CommandBufferInheritanceRenderPassTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
  20416. {
  20417. transform = transform_;
  20418. return *this;
  20419. }
  20420. CommandBufferInheritanceRenderPassTransformInfoQCOM & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
  20421. {
  20422. renderArea = renderArea_;
  20423. return *this;
  20424. }
  20425. operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM const&() const VULKAN_HPP_NOEXCEPT
  20426. {
  20427. return *reinterpret_cast<const VkCommandBufferInheritanceRenderPassTransformInfoQCOM*>( this );
  20428. }
  20429. operator VkCommandBufferInheritanceRenderPassTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
  20430. {
  20431. return *reinterpret_cast<VkCommandBufferInheritanceRenderPassTransformInfoQCOM*>( this );
  20432. }
  20433. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  20434. auto operator<=>( CommandBufferInheritanceRenderPassTransformInfoQCOM const& ) const = default;
  20435. #else
  20436. bool operator==( CommandBufferInheritanceRenderPassTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
  20437. {
  20438. return ( sType == rhs.sType )
  20439. && ( pNext == rhs.pNext )
  20440. && ( transform == rhs.transform )
  20441. && ( renderArea == rhs.renderArea );
  20442. }
  20443. bool operator!=( CommandBufferInheritanceRenderPassTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
  20444. {
  20445. return !operator==( rhs );
  20446. }
  20447. #endif
  20448. public:
  20449. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM;
  20450. void* pNext = {};
  20451. VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
  20452. VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
  20453. };
  20454. static_assert( sizeof( CommandBufferInheritanceRenderPassTransformInfoQCOM ) == sizeof( VkCommandBufferInheritanceRenderPassTransformInfoQCOM ), "struct and wrapper have different size!" );
  20455. static_assert( std::is_standard_layout<CommandBufferInheritanceRenderPassTransformInfoQCOM>::value, "struct wrapper is not a standard layout!" );
  20456. template <>
  20457. struct CppType<StructureType, StructureType::eCommandBufferInheritanceRenderPassTransformInfoQCOM>
  20458. {
  20459. using Type = CommandBufferInheritanceRenderPassTransformInfoQCOM;
  20460. };
  20461. struct CommandPoolCreateInfo
  20462. {
  20463. static const bool allowDuplicate = false;
  20464. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCommandPoolCreateInfo;
  20465. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  20466. VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo(VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}) VULKAN_HPP_NOEXCEPT
  20467. : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ )
  20468. {}
  20469. VULKAN_HPP_CONSTEXPR CommandPoolCreateInfo( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  20470. CommandPoolCreateInfo( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  20471. : CommandPoolCreateInfo( *reinterpret_cast<CommandPoolCreateInfo const *>( &rhs ) )
  20472. {}
  20473. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  20474. VULKAN_HPP_CONSTEXPR_14 CommandPoolCreateInfo & operator=( CommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  20475. CommandPoolCreateInfo & operator=( VkCommandPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  20476. {
  20477. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo const *>( &rhs );
  20478. return *this;
  20479. }
  20480. CommandPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  20481. {
  20482. pNext = pNext_;
  20483. return *this;
  20484. }
  20485. CommandPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  20486. {
  20487. flags = flags_;
  20488. return *this;
  20489. }
  20490. CommandPoolCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
  20491. {
  20492. queueFamilyIndex = queueFamilyIndex_;
  20493. return *this;
  20494. }
  20495. operator VkCommandPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  20496. {
  20497. return *reinterpret_cast<const VkCommandPoolCreateInfo*>( this );
  20498. }
  20499. operator VkCommandPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
  20500. {
  20501. return *reinterpret_cast<VkCommandPoolCreateInfo*>( this );
  20502. }
  20503. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  20504. auto operator<=>( CommandPoolCreateInfo const& ) const = default;
  20505. #else
  20506. bool operator==( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  20507. {
  20508. return ( sType == rhs.sType )
  20509. && ( pNext == rhs.pNext )
  20510. && ( flags == rhs.flags )
  20511. && ( queueFamilyIndex == rhs.queueFamilyIndex );
  20512. }
  20513. bool operator!=( CommandPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  20514. {
  20515. return !operator==( rhs );
  20516. }
  20517. #endif
  20518. public:
  20519. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCommandPoolCreateInfo;
  20520. const void* pNext = {};
  20521. VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags flags = {};
  20522. uint32_t queueFamilyIndex = {};
  20523. };
  20524. static_assert( sizeof( CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ), "struct and wrapper have different size!" );
  20525. static_assert( std::is_standard_layout<CommandPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
  20526. template <>
  20527. struct CppType<StructureType, StructureType::eCommandPoolCreateInfo>
  20528. {
  20529. using Type = CommandPoolCreateInfo;
  20530. };
  20531. class ShaderModule
  20532. {
  20533. public:
  20534. using CType = VkShaderModule;
  20535. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule;
  20536. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule;
  20537. public:
  20538. VULKAN_HPP_CONSTEXPR ShaderModule() VULKAN_HPP_NOEXCEPT
  20539. : m_shaderModule(VK_NULL_HANDLE)
  20540. {}
  20541. VULKAN_HPP_CONSTEXPR ShaderModule( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  20542. : m_shaderModule(VK_NULL_HANDLE)
  20543. {}
  20544. VULKAN_HPP_TYPESAFE_EXPLICIT ShaderModule( VkShaderModule shaderModule ) VULKAN_HPP_NOEXCEPT
  20545. : m_shaderModule( shaderModule )
  20546. {}
  20547. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  20548. ShaderModule & operator=(VkShaderModule shaderModule) VULKAN_HPP_NOEXCEPT
  20549. {
  20550. m_shaderModule = shaderModule;
  20551. return *this;
  20552. }
  20553. #endif
  20554. ShaderModule & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  20555. {
  20556. m_shaderModule = VK_NULL_HANDLE;
  20557. return *this;
  20558. }
  20559. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  20560. auto operator<=>( ShaderModule const& ) const = default;
  20561. #else
  20562. bool operator==( ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
  20563. {
  20564. return m_shaderModule == rhs.m_shaderModule;
  20565. }
  20566. bool operator!=(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
  20567. {
  20568. return m_shaderModule != rhs.m_shaderModule;
  20569. }
  20570. bool operator<(ShaderModule const & rhs ) const VULKAN_HPP_NOEXCEPT
  20571. {
  20572. return m_shaderModule < rhs.m_shaderModule;
  20573. }
  20574. #endif
  20575. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkShaderModule() const VULKAN_HPP_NOEXCEPT
  20576. {
  20577. return m_shaderModule;
  20578. }
  20579. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  20580. {
  20581. return m_shaderModule != VK_NULL_HANDLE;
  20582. }
  20583. bool operator!() const VULKAN_HPP_NOEXCEPT
  20584. {
  20585. return m_shaderModule == VK_NULL_HANDLE;
  20586. }
  20587. private:
  20588. VkShaderModule m_shaderModule;
  20589. };
  20590. static_assert( sizeof( VULKAN_HPP_NAMESPACE::ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
  20591. template <>
  20592. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eShaderModule>
  20593. {
  20594. using type = VULKAN_HPP_NAMESPACE::ShaderModule;
  20595. };
  20596. template <>
  20597. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eShaderModule>
  20598. {
  20599. using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
  20600. };
  20601. template <>
  20602. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eShaderModule>
  20603. {
  20604. using Type = VULKAN_HPP_NAMESPACE::ShaderModule;
  20605. };
  20606. template <>
  20607. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ShaderModule>
  20608. {
  20609. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  20610. };
  20611. struct SpecializationMapEntry
  20612. {
  20613. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  20614. VULKAN_HPP_CONSTEXPR SpecializationMapEntry(uint32_t constantID_ = {}, uint32_t offset_ = {}, size_t size_ = {}) VULKAN_HPP_NOEXCEPT
  20615. : constantID( constantID_ ), offset( offset_ ), size( size_ )
  20616. {}
  20617. VULKAN_HPP_CONSTEXPR SpecializationMapEntry( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  20618. SpecializationMapEntry( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
  20619. : SpecializationMapEntry( *reinterpret_cast<SpecializationMapEntry const *>( &rhs ) )
  20620. {}
  20621. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  20622. VULKAN_HPP_CONSTEXPR_14 SpecializationMapEntry & operator=( SpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  20623. SpecializationMapEntry & operator=( VkSpecializationMapEntry const & rhs ) VULKAN_HPP_NOEXCEPT
  20624. {
  20625. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationMapEntry const *>( &rhs );
  20626. return *this;
  20627. }
  20628. SpecializationMapEntry & setConstantID( uint32_t constantID_ ) VULKAN_HPP_NOEXCEPT
  20629. {
  20630. constantID = constantID_;
  20631. return *this;
  20632. }
  20633. SpecializationMapEntry & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
  20634. {
  20635. offset = offset_;
  20636. return *this;
  20637. }
  20638. SpecializationMapEntry & setSize( size_t size_ ) VULKAN_HPP_NOEXCEPT
  20639. {
  20640. size = size_;
  20641. return *this;
  20642. }
  20643. operator VkSpecializationMapEntry const&() const VULKAN_HPP_NOEXCEPT
  20644. {
  20645. return *reinterpret_cast<const VkSpecializationMapEntry*>( this );
  20646. }
  20647. operator VkSpecializationMapEntry &() VULKAN_HPP_NOEXCEPT
  20648. {
  20649. return *reinterpret_cast<VkSpecializationMapEntry*>( this );
  20650. }
  20651. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  20652. auto operator<=>( SpecializationMapEntry const& ) const = default;
  20653. #else
  20654. bool operator==( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
  20655. {
  20656. return ( constantID == rhs.constantID )
  20657. && ( offset == rhs.offset )
  20658. && ( size == rhs.size );
  20659. }
  20660. bool operator!=( SpecializationMapEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
  20661. {
  20662. return !operator==( rhs );
  20663. }
  20664. #endif
  20665. public:
  20666. uint32_t constantID = {};
  20667. uint32_t offset = {};
  20668. size_t size = {};
  20669. };
  20670. static_assert( sizeof( SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ), "struct and wrapper have different size!" );
  20671. static_assert( std::is_standard_layout<SpecializationMapEntry>::value, "struct wrapper is not a standard layout!" );
  20672. struct SpecializationInfo
  20673. {
  20674. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  20675. VULKAN_HPP_CONSTEXPR SpecializationInfo(uint32_t mapEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ = {}, size_t dataSize_ = {}, const void* pData_ = {}) VULKAN_HPP_NOEXCEPT
  20676. : mapEntryCount( mapEntryCount_ ), pMapEntries( pMapEntries_ ), dataSize( dataSize_ ), pData( pData_ )
  20677. {}
  20678. VULKAN_HPP_CONSTEXPR SpecializationInfo( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  20679. SpecializationInfo( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  20680. : SpecializationInfo( *reinterpret_cast<SpecializationInfo const *>( &rhs ) )
  20681. {}
  20682. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  20683. template <typename T>
  20684. SpecializationInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ = {} )
  20685. : mapEntryCount( static_cast<uint32_t>( mapEntries_.size() ) ), pMapEntries( mapEntries_.data() ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
  20686. {}
  20687. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  20688. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  20689. VULKAN_HPP_CONSTEXPR_14 SpecializationInfo & operator=( SpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  20690. SpecializationInfo & operator=( VkSpecializationInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  20691. {
  20692. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SpecializationInfo const *>( &rhs );
  20693. return *this;
  20694. }
  20695. SpecializationInfo & setMapEntryCount( uint32_t mapEntryCount_ ) VULKAN_HPP_NOEXCEPT
  20696. {
  20697. mapEntryCount = mapEntryCount_;
  20698. return *this;
  20699. }
  20700. SpecializationInfo & setPMapEntries( const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries_ ) VULKAN_HPP_NOEXCEPT
  20701. {
  20702. pMapEntries = pMapEntries_;
  20703. return *this;
  20704. }
  20705. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  20706. SpecializationInfo & setMapEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SpecializationMapEntry> const & mapEntries_ ) VULKAN_HPP_NOEXCEPT
  20707. {
  20708. mapEntryCount = static_cast<uint32_t>( mapEntries_.size() );
  20709. pMapEntries = mapEntries_.data();
  20710. return *this;
  20711. }
  20712. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  20713. SpecializationInfo & setDataSize( size_t dataSize_ ) VULKAN_HPP_NOEXCEPT
  20714. {
  20715. dataSize = dataSize_;
  20716. return *this;
  20717. }
  20718. SpecializationInfo & setPData( const void* pData_ ) VULKAN_HPP_NOEXCEPT
  20719. {
  20720. pData = pData_;
  20721. return *this;
  20722. }
  20723. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  20724. template <typename T>
  20725. SpecializationInfo & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
  20726. {
  20727. dataSize = data_.size() * sizeof(T);
  20728. pData = data_.data();
  20729. return *this;
  20730. }
  20731. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  20732. operator VkSpecializationInfo const&() const VULKAN_HPP_NOEXCEPT
  20733. {
  20734. return *reinterpret_cast<const VkSpecializationInfo*>( this );
  20735. }
  20736. operator VkSpecializationInfo &() VULKAN_HPP_NOEXCEPT
  20737. {
  20738. return *reinterpret_cast<VkSpecializationInfo*>( this );
  20739. }
  20740. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  20741. auto operator<=>( SpecializationInfo const& ) const = default;
  20742. #else
  20743. bool operator==( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  20744. {
  20745. return ( mapEntryCount == rhs.mapEntryCount )
  20746. && ( pMapEntries == rhs.pMapEntries )
  20747. && ( dataSize == rhs.dataSize )
  20748. && ( pData == rhs.pData );
  20749. }
  20750. bool operator!=( SpecializationInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  20751. {
  20752. return !operator==( rhs );
  20753. }
  20754. #endif
  20755. public:
  20756. uint32_t mapEntryCount = {};
  20757. const VULKAN_HPP_NAMESPACE::SpecializationMapEntry* pMapEntries = {};
  20758. size_t dataSize = {};
  20759. const void* pData = {};
  20760. };
  20761. static_assert( sizeof( SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
  20762. static_assert( std::is_standard_layout<SpecializationInfo>::value, "struct wrapper is not a standard layout!" );
  20763. struct PipelineShaderStageCreateInfo
  20764. {
  20765. static const bool allowDuplicate = false;
  20766. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageCreateInfo;
  20767. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  20768. VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo(VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex, VULKAN_HPP_NAMESPACE::ShaderModule module_ = {}, const char* pName_ = {}, const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ = {}) VULKAN_HPP_NOEXCEPT
  20769. : flags( flags_ ), stage( stage_ ), module( module_ ), pName( pName_ ), pSpecializationInfo( pSpecializationInfo_ )
  20770. {}
  20771. VULKAN_HPP_CONSTEXPR PipelineShaderStageCreateInfo( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  20772. PipelineShaderStageCreateInfo( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  20773. : PipelineShaderStageCreateInfo( *reinterpret_cast<PipelineShaderStageCreateInfo const *>( &rhs ) )
  20774. {}
  20775. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  20776. VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageCreateInfo & operator=( PipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  20777. PipelineShaderStageCreateInfo & operator=( VkPipelineShaderStageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  20778. {
  20779. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const *>( &rhs );
  20780. return *this;
  20781. }
  20782. PipelineShaderStageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  20783. {
  20784. pNext = pNext_;
  20785. return *this;
  20786. }
  20787. PipelineShaderStageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  20788. {
  20789. flags = flags_;
  20790. return *this;
  20791. }
  20792. PipelineShaderStageCreateInfo & setStage( VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage_ ) VULKAN_HPP_NOEXCEPT
  20793. {
  20794. stage = stage_;
  20795. return *this;
  20796. }
  20797. PipelineShaderStageCreateInfo & setModule( VULKAN_HPP_NAMESPACE::ShaderModule module_ ) VULKAN_HPP_NOEXCEPT
  20798. {
  20799. module = module_;
  20800. return *this;
  20801. }
  20802. PipelineShaderStageCreateInfo & setPName( const char* pName_ ) VULKAN_HPP_NOEXCEPT
  20803. {
  20804. pName = pName_;
  20805. return *this;
  20806. }
  20807. PipelineShaderStageCreateInfo & setPSpecializationInfo( const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo_ ) VULKAN_HPP_NOEXCEPT
  20808. {
  20809. pSpecializationInfo = pSpecializationInfo_;
  20810. return *this;
  20811. }
  20812. operator VkPipelineShaderStageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  20813. {
  20814. return *reinterpret_cast<const VkPipelineShaderStageCreateInfo*>( this );
  20815. }
  20816. operator VkPipelineShaderStageCreateInfo &() VULKAN_HPP_NOEXCEPT
  20817. {
  20818. return *reinterpret_cast<VkPipelineShaderStageCreateInfo*>( this );
  20819. }
  20820. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  20821. auto operator<=>( PipelineShaderStageCreateInfo const& ) const = default;
  20822. #else
  20823. bool operator==( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  20824. {
  20825. return ( sType == rhs.sType )
  20826. && ( pNext == rhs.pNext )
  20827. && ( flags == rhs.flags )
  20828. && ( stage == rhs.stage )
  20829. && ( module == rhs.module )
  20830. && ( pName == rhs.pName )
  20831. && ( pSpecializationInfo == rhs.pSpecializationInfo );
  20832. }
  20833. bool operator!=( PipelineShaderStageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  20834. {
  20835. return !operator==( rhs );
  20836. }
  20837. #endif
  20838. public:
  20839. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageCreateInfo;
  20840. const void* pNext = {};
  20841. VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags flags = {};
  20842. VULKAN_HPP_NAMESPACE::ShaderStageFlagBits stage = VULKAN_HPP_NAMESPACE::ShaderStageFlagBits::eVertex;
  20843. VULKAN_HPP_NAMESPACE::ShaderModule module = {};
  20844. const char* pName = {};
  20845. const VULKAN_HPP_NAMESPACE::SpecializationInfo* pSpecializationInfo = {};
  20846. };
  20847. static_assert( sizeof( PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ), "struct and wrapper have different size!" );
  20848. static_assert( std::is_standard_layout<PipelineShaderStageCreateInfo>::value, "struct wrapper is not a standard layout!" );
  20849. template <>
  20850. struct CppType<StructureType, StructureType::ePipelineShaderStageCreateInfo>
  20851. {
  20852. using Type = PipelineShaderStageCreateInfo;
  20853. };
  20854. class PipelineLayout
  20855. {
  20856. public:
  20857. using CType = VkPipelineLayout;
  20858. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout;
  20859. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout;
  20860. public:
  20861. VULKAN_HPP_CONSTEXPR PipelineLayout() VULKAN_HPP_NOEXCEPT
  20862. : m_pipelineLayout(VK_NULL_HANDLE)
  20863. {}
  20864. VULKAN_HPP_CONSTEXPR PipelineLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  20865. : m_pipelineLayout(VK_NULL_HANDLE)
  20866. {}
  20867. VULKAN_HPP_TYPESAFE_EXPLICIT PipelineLayout( VkPipelineLayout pipelineLayout ) VULKAN_HPP_NOEXCEPT
  20868. : m_pipelineLayout( pipelineLayout )
  20869. {}
  20870. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  20871. PipelineLayout & operator=(VkPipelineLayout pipelineLayout) VULKAN_HPP_NOEXCEPT
  20872. {
  20873. m_pipelineLayout = pipelineLayout;
  20874. return *this;
  20875. }
  20876. #endif
  20877. PipelineLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  20878. {
  20879. m_pipelineLayout = VK_NULL_HANDLE;
  20880. return *this;
  20881. }
  20882. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  20883. auto operator<=>( PipelineLayout const& ) const = default;
  20884. #else
  20885. bool operator==( PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
  20886. {
  20887. return m_pipelineLayout == rhs.m_pipelineLayout;
  20888. }
  20889. bool operator!=(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
  20890. {
  20891. return m_pipelineLayout != rhs.m_pipelineLayout;
  20892. }
  20893. bool operator<(PipelineLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
  20894. {
  20895. return m_pipelineLayout < rhs.m_pipelineLayout;
  20896. }
  20897. #endif
  20898. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineLayout() const VULKAN_HPP_NOEXCEPT
  20899. {
  20900. return m_pipelineLayout;
  20901. }
  20902. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  20903. {
  20904. return m_pipelineLayout != VK_NULL_HANDLE;
  20905. }
  20906. bool operator!() const VULKAN_HPP_NOEXCEPT
  20907. {
  20908. return m_pipelineLayout == VK_NULL_HANDLE;
  20909. }
  20910. private:
  20911. VkPipelineLayout m_pipelineLayout;
  20912. };
  20913. static_assert( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" );
  20914. template <>
  20915. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePipelineLayout>
  20916. {
  20917. using type = VULKAN_HPP_NAMESPACE::PipelineLayout;
  20918. };
  20919. template <>
  20920. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipelineLayout>
  20921. {
  20922. using Type = VULKAN_HPP_NAMESPACE::PipelineLayout;
  20923. };
  20924. template <>
  20925. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineLayout>
  20926. {
  20927. using Type = VULKAN_HPP_NAMESPACE::PipelineLayout;
  20928. };
  20929. template <>
  20930. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PipelineLayout>
  20931. {
  20932. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  20933. };
  20934. class Pipeline
  20935. {
  20936. public:
  20937. using CType = VkPipeline;
  20938. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipeline;
  20939. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline;
  20940. public:
  20941. VULKAN_HPP_CONSTEXPR Pipeline() VULKAN_HPP_NOEXCEPT
  20942. : m_pipeline(VK_NULL_HANDLE)
  20943. {}
  20944. VULKAN_HPP_CONSTEXPR Pipeline( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  20945. : m_pipeline(VK_NULL_HANDLE)
  20946. {}
  20947. VULKAN_HPP_TYPESAFE_EXPLICIT Pipeline( VkPipeline pipeline ) VULKAN_HPP_NOEXCEPT
  20948. : m_pipeline( pipeline )
  20949. {}
  20950. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  20951. Pipeline & operator=(VkPipeline pipeline) VULKAN_HPP_NOEXCEPT
  20952. {
  20953. m_pipeline = pipeline;
  20954. return *this;
  20955. }
  20956. #endif
  20957. Pipeline & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  20958. {
  20959. m_pipeline = VK_NULL_HANDLE;
  20960. return *this;
  20961. }
  20962. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  20963. auto operator<=>( Pipeline const& ) const = default;
  20964. #else
  20965. bool operator==( Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
  20966. {
  20967. return m_pipeline == rhs.m_pipeline;
  20968. }
  20969. bool operator!=(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
  20970. {
  20971. return m_pipeline != rhs.m_pipeline;
  20972. }
  20973. bool operator<(Pipeline const & rhs ) const VULKAN_HPP_NOEXCEPT
  20974. {
  20975. return m_pipeline < rhs.m_pipeline;
  20976. }
  20977. #endif
  20978. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipeline() const VULKAN_HPP_NOEXCEPT
  20979. {
  20980. return m_pipeline;
  20981. }
  20982. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  20983. {
  20984. return m_pipeline != VK_NULL_HANDLE;
  20985. }
  20986. bool operator!() const VULKAN_HPP_NOEXCEPT
  20987. {
  20988. return m_pipeline == VK_NULL_HANDLE;
  20989. }
  20990. private:
  20991. VkPipeline m_pipeline;
  20992. };
  20993. static_assert( sizeof( VULKAN_HPP_NAMESPACE::Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" );
  20994. template <>
  20995. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePipeline>
  20996. {
  20997. using type = VULKAN_HPP_NAMESPACE::Pipeline;
  20998. };
  20999. template <>
  21000. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipeline>
  21001. {
  21002. using Type = VULKAN_HPP_NAMESPACE::Pipeline;
  21003. };
  21004. template <>
  21005. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipeline>
  21006. {
  21007. using Type = VULKAN_HPP_NAMESPACE::Pipeline;
  21008. };
  21009. template <>
  21010. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Pipeline>
  21011. {
  21012. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  21013. };
  21014. struct ComputePipelineCreateInfo
  21015. {
  21016. static const bool allowDuplicate = false;
  21017. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eComputePipelineCreateInfo;
  21018. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21019. VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
  21020. : flags( flags_ ), stage( stage_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
  21021. {}
  21022. VULKAN_HPP_CONSTEXPR ComputePipelineCreateInfo( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21023. ComputePipelineCreateInfo( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  21024. : ComputePipelineCreateInfo( *reinterpret_cast<ComputePipelineCreateInfo const *>( &rhs ) )
  21025. {}
  21026. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21027. VULKAN_HPP_CONSTEXPR_14 ComputePipelineCreateInfo & operator=( ComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21028. ComputePipelineCreateInfo & operator=( VkComputePipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  21029. {
  21030. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo const *>( &rhs );
  21031. return *this;
  21032. }
  21033. ComputePipelineCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  21034. {
  21035. pNext = pNext_;
  21036. return *this;
  21037. }
  21038. ComputePipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  21039. {
  21040. flags = flags_;
  21041. return *this;
  21042. }
  21043. ComputePipelineCreateInfo & setStage( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo const & stage_ ) VULKAN_HPP_NOEXCEPT
  21044. {
  21045. stage = stage_;
  21046. return *this;
  21047. }
  21048. ComputePipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
  21049. {
  21050. layout = layout_;
  21051. return *this;
  21052. }
  21053. ComputePipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
  21054. {
  21055. basePipelineHandle = basePipelineHandle_;
  21056. return *this;
  21057. }
  21058. ComputePipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
  21059. {
  21060. basePipelineIndex = basePipelineIndex_;
  21061. return *this;
  21062. }
  21063. operator VkComputePipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  21064. {
  21065. return *reinterpret_cast<const VkComputePipelineCreateInfo*>( this );
  21066. }
  21067. operator VkComputePipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
  21068. {
  21069. return *reinterpret_cast<VkComputePipelineCreateInfo*>( this );
  21070. }
  21071. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  21072. auto operator<=>( ComputePipelineCreateInfo const& ) const = default;
  21073. #else
  21074. bool operator==( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  21075. {
  21076. return ( sType == rhs.sType )
  21077. && ( pNext == rhs.pNext )
  21078. && ( flags == rhs.flags )
  21079. && ( stage == rhs.stage )
  21080. && ( layout == rhs.layout )
  21081. && ( basePipelineHandle == rhs.basePipelineHandle )
  21082. && ( basePipelineIndex == rhs.basePipelineIndex );
  21083. }
  21084. bool operator!=( ComputePipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  21085. {
  21086. return !operator==( rhs );
  21087. }
  21088. #endif
  21089. public:
  21090. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eComputePipelineCreateInfo;
  21091. const void* pNext = {};
  21092. VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
  21093. VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo stage = {};
  21094. VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
  21095. VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
  21096. int32_t basePipelineIndex = {};
  21097. };
  21098. static_assert( sizeof( ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ), "struct and wrapper have different size!" );
  21099. static_assert( std::is_standard_layout<ComputePipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
  21100. template <>
  21101. struct CppType<StructureType, StructureType::eComputePipelineCreateInfo>
  21102. {
  21103. using Type = ComputePipelineCreateInfo;
  21104. };
  21105. struct ConditionalRenderingBeginInfoEXT
  21106. {
  21107. static const bool allowDuplicate = false;
  21108. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eConditionalRenderingBeginInfoEXT;
  21109. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21110. VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ = {}) VULKAN_HPP_NOEXCEPT
  21111. : buffer( buffer_ ), offset( offset_ ), flags( flags_ )
  21112. {}
  21113. VULKAN_HPP_CONSTEXPR ConditionalRenderingBeginInfoEXT( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21114. ConditionalRenderingBeginInfoEXT( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  21115. : ConditionalRenderingBeginInfoEXT( *reinterpret_cast<ConditionalRenderingBeginInfoEXT const *>( &rhs ) )
  21116. {}
  21117. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21118. VULKAN_HPP_CONSTEXPR_14 ConditionalRenderingBeginInfoEXT & operator=( ConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21119. ConditionalRenderingBeginInfoEXT & operator=( VkConditionalRenderingBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  21120. {
  21121. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT const *>( &rhs );
  21122. return *this;
  21123. }
  21124. ConditionalRenderingBeginInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  21125. {
  21126. pNext = pNext_;
  21127. return *this;
  21128. }
  21129. ConditionalRenderingBeginInfoEXT & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
  21130. {
  21131. buffer = buffer_;
  21132. return *this;
  21133. }
  21134. ConditionalRenderingBeginInfoEXT & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
  21135. {
  21136. offset = offset_;
  21137. return *this;
  21138. }
  21139. ConditionalRenderingBeginInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
  21140. {
  21141. flags = flags_;
  21142. return *this;
  21143. }
  21144. operator VkConditionalRenderingBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  21145. {
  21146. return *reinterpret_cast<const VkConditionalRenderingBeginInfoEXT*>( this );
  21147. }
  21148. operator VkConditionalRenderingBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
  21149. {
  21150. return *reinterpret_cast<VkConditionalRenderingBeginInfoEXT*>( this );
  21151. }
  21152. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  21153. auto operator<=>( ConditionalRenderingBeginInfoEXT const& ) const = default;
  21154. #else
  21155. bool operator==( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  21156. {
  21157. return ( sType == rhs.sType )
  21158. && ( pNext == rhs.pNext )
  21159. && ( buffer == rhs.buffer )
  21160. && ( offset == rhs.offset )
  21161. && ( flags == rhs.flags );
  21162. }
  21163. bool operator!=( ConditionalRenderingBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  21164. {
  21165. return !operator==( rhs );
  21166. }
  21167. #endif
  21168. public:
  21169. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eConditionalRenderingBeginInfoEXT;
  21170. const void* pNext = {};
  21171. VULKAN_HPP_NAMESPACE::Buffer buffer = {};
  21172. VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
  21173. VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT flags = {};
  21174. };
  21175. static_assert( sizeof( ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ), "struct and wrapper have different size!" );
  21176. static_assert( std::is_standard_layout<ConditionalRenderingBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
  21177. template <>
  21178. struct CppType<StructureType, StructureType::eConditionalRenderingBeginInfoEXT>
  21179. {
  21180. using Type = ConditionalRenderingBeginInfoEXT;
  21181. };
  21182. struct ConformanceVersion
  21183. {
  21184. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21185. VULKAN_HPP_CONSTEXPR ConformanceVersion(uint8_t major_ = {}, uint8_t minor_ = {}, uint8_t subminor_ = {}, uint8_t patch_ = {}) VULKAN_HPP_NOEXCEPT
  21186. : major( major_ ), minor( minor_ ), subminor( subminor_ ), patch( patch_ )
  21187. {}
  21188. VULKAN_HPP_CONSTEXPR ConformanceVersion( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21189. ConformanceVersion( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
  21190. : ConformanceVersion( *reinterpret_cast<ConformanceVersion const *>( &rhs ) )
  21191. {}
  21192. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21193. VULKAN_HPP_CONSTEXPR_14 ConformanceVersion & operator=( ConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21194. ConformanceVersion & operator=( VkConformanceVersion const & rhs ) VULKAN_HPP_NOEXCEPT
  21195. {
  21196. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ConformanceVersion const *>( &rhs );
  21197. return *this;
  21198. }
  21199. ConformanceVersion & setMajor( uint8_t major_ ) VULKAN_HPP_NOEXCEPT
  21200. {
  21201. major = major_;
  21202. return *this;
  21203. }
  21204. ConformanceVersion & setMinor( uint8_t minor_ ) VULKAN_HPP_NOEXCEPT
  21205. {
  21206. minor = minor_;
  21207. return *this;
  21208. }
  21209. ConformanceVersion & setSubminor( uint8_t subminor_ ) VULKAN_HPP_NOEXCEPT
  21210. {
  21211. subminor = subminor_;
  21212. return *this;
  21213. }
  21214. ConformanceVersion & setPatch( uint8_t patch_ ) VULKAN_HPP_NOEXCEPT
  21215. {
  21216. patch = patch_;
  21217. return *this;
  21218. }
  21219. operator VkConformanceVersion const&() const VULKAN_HPP_NOEXCEPT
  21220. {
  21221. return *reinterpret_cast<const VkConformanceVersion*>( this );
  21222. }
  21223. operator VkConformanceVersion &() VULKAN_HPP_NOEXCEPT
  21224. {
  21225. return *reinterpret_cast<VkConformanceVersion*>( this );
  21226. }
  21227. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  21228. auto operator<=>( ConformanceVersion const& ) const = default;
  21229. #else
  21230. bool operator==( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT
  21231. {
  21232. return ( major == rhs.major )
  21233. && ( minor == rhs.minor )
  21234. && ( subminor == rhs.subminor )
  21235. && ( patch == rhs.patch );
  21236. }
  21237. bool operator!=( ConformanceVersion const& rhs ) const VULKAN_HPP_NOEXCEPT
  21238. {
  21239. return !operator==( rhs );
  21240. }
  21241. #endif
  21242. public:
  21243. uint8_t major = {};
  21244. uint8_t minor = {};
  21245. uint8_t subminor = {};
  21246. uint8_t patch = {};
  21247. };
  21248. static_assert( sizeof( ConformanceVersion ) == sizeof( VkConformanceVersion ), "struct and wrapper have different size!" );
  21249. static_assert( std::is_standard_layout<ConformanceVersion>::value, "struct wrapper is not a standard layout!" );
  21250. using ConformanceVersionKHR = ConformanceVersion;
  21251. struct CooperativeMatrixPropertiesNV
  21252. {
  21253. static const bool allowDuplicate = false;
  21254. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCooperativeMatrixPropertiesNV;
  21255. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21256. VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV(uint32_t MSize_ = {}, uint32_t NSize_ = {}, uint32_t KSize_ = {}, VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16, VULKAN_HPP_NAMESPACE::ScopeNV scope_ = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice) VULKAN_HPP_NOEXCEPT
  21257. : MSize( MSize_ ), NSize( NSize_ ), KSize( KSize_ ), AType( AType_ ), BType( BType_ ), CType( CType_ ), DType( DType_ ), scope( scope_ )
  21258. {}
  21259. VULKAN_HPP_CONSTEXPR CooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21260. CooperativeMatrixPropertiesNV( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  21261. : CooperativeMatrixPropertiesNV( *reinterpret_cast<CooperativeMatrixPropertiesNV const *>( &rhs ) )
  21262. {}
  21263. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21264. VULKAN_HPP_CONSTEXPR_14 CooperativeMatrixPropertiesNV & operator=( CooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21265. CooperativeMatrixPropertiesNV & operator=( VkCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  21266. {
  21267. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV const *>( &rhs );
  21268. return *this;
  21269. }
  21270. CooperativeMatrixPropertiesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  21271. {
  21272. pNext = pNext_;
  21273. return *this;
  21274. }
  21275. CooperativeMatrixPropertiesNV & setMSize( uint32_t MSize_ ) VULKAN_HPP_NOEXCEPT
  21276. {
  21277. MSize = MSize_;
  21278. return *this;
  21279. }
  21280. CooperativeMatrixPropertiesNV & setNSize( uint32_t NSize_ ) VULKAN_HPP_NOEXCEPT
  21281. {
  21282. NSize = NSize_;
  21283. return *this;
  21284. }
  21285. CooperativeMatrixPropertiesNV & setKSize( uint32_t KSize_ ) VULKAN_HPP_NOEXCEPT
  21286. {
  21287. KSize = KSize_;
  21288. return *this;
  21289. }
  21290. CooperativeMatrixPropertiesNV & setAType( VULKAN_HPP_NAMESPACE::ComponentTypeNV AType_ ) VULKAN_HPP_NOEXCEPT
  21291. {
  21292. AType = AType_;
  21293. return *this;
  21294. }
  21295. CooperativeMatrixPropertiesNV & setBType( VULKAN_HPP_NAMESPACE::ComponentTypeNV BType_ ) VULKAN_HPP_NOEXCEPT
  21296. {
  21297. BType = BType_;
  21298. return *this;
  21299. }
  21300. CooperativeMatrixPropertiesNV & setCType( VULKAN_HPP_NAMESPACE::ComponentTypeNV CType_ ) VULKAN_HPP_NOEXCEPT
  21301. {
  21302. CType = CType_;
  21303. return *this;
  21304. }
  21305. CooperativeMatrixPropertiesNV & setDType( VULKAN_HPP_NAMESPACE::ComponentTypeNV DType_ ) VULKAN_HPP_NOEXCEPT
  21306. {
  21307. DType = DType_;
  21308. return *this;
  21309. }
  21310. CooperativeMatrixPropertiesNV & setScope( VULKAN_HPP_NAMESPACE::ScopeNV scope_ ) VULKAN_HPP_NOEXCEPT
  21311. {
  21312. scope = scope_;
  21313. return *this;
  21314. }
  21315. operator VkCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
  21316. {
  21317. return *reinterpret_cast<const VkCooperativeMatrixPropertiesNV*>( this );
  21318. }
  21319. operator VkCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
  21320. {
  21321. return *reinterpret_cast<VkCooperativeMatrixPropertiesNV*>( this );
  21322. }
  21323. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  21324. auto operator<=>( CooperativeMatrixPropertiesNV const& ) const = default;
  21325. #else
  21326. bool operator==( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  21327. {
  21328. return ( sType == rhs.sType )
  21329. && ( pNext == rhs.pNext )
  21330. && ( MSize == rhs.MSize )
  21331. && ( NSize == rhs.NSize )
  21332. && ( KSize == rhs.KSize )
  21333. && ( AType == rhs.AType )
  21334. && ( BType == rhs.BType )
  21335. && ( CType == rhs.CType )
  21336. && ( DType == rhs.DType )
  21337. && ( scope == rhs.scope );
  21338. }
  21339. bool operator!=( CooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  21340. {
  21341. return !operator==( rhs );
  21342. }
  21343. #endif
  21344. public:
  21345. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCooperativeMatrixPropertiesNV;
  21346. void* pNext = {};
  21347. uint32_t MSize = {};
  21348. uint32_t NSize = {};
  21349. uint32_t KSize = {};
  21350. VULKAN_HPP_NAMESPACE::ComponentTypeNV AType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
  21351. VULKAN_HPP_NAMESPACE::ComponentTypeNV BType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
  21352. VULKAN_HPP_NAMESPACE::ComponentTypeNV CType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
  21353. VULKAN_HPP_NAMESPACE::ComponentTypeNV DType = VULKAN_HPP_NAMESPACE::ComponentTypeNV::eFloat16;
  21354. VULKAN_HPP_NAMESPACE::ScopeNV scope = VULKAN_HPP_NAMESPACE::ScopeNV::eDevice;
  21355. };
  21356. static_assert( sizeof( CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
  21357. static_assert( std::is_standard_layout<CooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
  21358. template <>
  21359. struct CppType<StructureType, StructureType::eCooperativeMatrixPropertiesNV>
  21360. {
  21361. using Type = CooperativeMatrixPropertiesNV;
  21362. };
  21363. struct CopyAccelerationStructureInfoKHR
  21364. {
  21365. static const bool allowDuplicate = false;
  21366. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureInfoKHR;
  21367. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21368. VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone) VULKAN_HPP_NOEXCEPT
  21369. : src( src_ ), dst( dst_ ), mode( mode_ )
  21370. {}
  21371. VULKAN_HPP_CONSTEXPR CopyAccelerationStructureInfoKHR( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21372. CopyAccelerationStructureInfoKHR( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  21373. : CopyAccelerationStructureInfoKHR( *reinterpret_cast<CopyAccelerationStructureInfoKHR const *>( &rhs ) )
  21374. {}
  21375. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21376. VULKAN_HPP_CONSTEXPR_14 CopyAccelerationStructureInfoKHR & operator=( CopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21377. CopyAccelerationStructureInfoKHR & operator=( VkCopyAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  21378. {
  21379. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR const *>( &rhs );
  21380. return *this;
  21381. }
  21382. CopyAccelerationStructureInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  21383. {
  21384. pNext = pNext_;
  21385. return *this;
  21386. }
  21387. CopyAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
  21388. {
  21389. src = src_;
  21390. return *this;
  21391. }
  21392. CopyAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
  21393. {
  21394. dst = dst_;
  21395. return *this;
  21396. }
  21397. CopyAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
  21398. {
  21399. mode = mode_;
  21400. return *this;
  21401. }
  21402. operator VkCopyAccelerationStructureInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  21403. {
  21404. return *reinterpret_cast<const VkCopyAccelerationStructureInfoKHR*>( this );
  21405. }
  21406. operator VkCopyAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
  21407. {
  21408. return *reinterpret_cast<VkCopyAccelerationStructureInfoKHR*>( this );
  21409. }
  21410. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  21411. auto operator<=>( CopyAccelerationStructureInfoKHR const& ) const = default;
  21412. #else
  21413. bool operator==( CopyAccelerationStructureInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  21414. {
  21415. return ( sType == rhs.sType )
  21416. && ( pNext == rhs.pNext )
  21417. && ( src == rhs.src )
  21418. && ( dst == rhs.dst )
  21419. && ( mode == rhs.mode );
  21420. }
  21421. bool operator!=( CopyAccelerationStructureInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  21422. {
  21423. return !operator==( rhs );
  21424. }
  21425. #endif
  21426. public:
  21427. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureInfoKHR;
  21428. const void* pNext = {};
  21429. VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {};
  21430. VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {};
  21431. VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
  21432. };
  21433. static_assert( sizeof( CopyAccelerationStructureInfoKHR ) == sizeof( VkCopyAccelerationStructureInfoKHR ), "struct and wrapper have different size!" );
  21434. static_assert( std::is_standard_layout<CopyAccelerationStructureInfoKHR>::value, "struct wrapper is not a standard layout!" );
  21435. template <>
  21436. struct CppType<StructureType, StructureType::eCopyAccelerationStructureInfoKHR>
  21437. {
  21438. using Type = CopyAccelerationStructureInfoKHR;
  21439. };
  21440. struct CopyAccelerationStructureToMemoryInfoKHR
  21441. {
  21442. static const bool allowDuplicate = false;
  21443. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR;
  21444. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21445. CopyAccelerationStructureToMemoryInfoKHR(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ = {}, VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone) VULKAN_HPP_NOEXCEPT
  21446. : src( src_ ), dst( dst_ ), mode( mode_ )
  21447. {}
  21448. CopyAccelerationStructureToMemoryInfoKHR( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21449. CopyAccelerationStructureToMemoryInfoKHR( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  21450. : CopyAccelerationStructureToMemoryInfoKHR( *reinterpret_cast<CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs ) )
  21451. {}
  21452. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21453. CopyAccelerationStructureToMemoryInfoKHR & operator=( CopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21454. CopyAccelerationStructureToMemoryInfoKHR & operator=( VkCopyAccelerationStructureToMemoryInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  21455. {
  21456. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR const *>( &rhs );
  21457. return *this;
  21458. }
  21459. CopyAccelerationStructureToMemoryInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  21460. {
  21461. pNext = pNext_;
  21462. return *this;
  21463. }
  21464. CopyAccelerationStructureToMemoryInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src_ ) VULKAN_HPP_NOEXCEPT
  21465. {
  21466. src = src_;
  21467. return *this;
  21468. }
  21469. CopyAccelerationStructureToMemoryInfoKHR & setDst( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR const & dst_ ) VULKAN_HPP_NOEXCEPT
  21470. {
  21471. dst = dst_;
  21472. return *this;
  21473. }
  21474. CopyAccelerationStructureToMemoryInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
  21475. {
  21476. mode = mode_;
  21477. return *this;
  21478. }
  21479. operator VkCopyAccelerationStructureToMemoryInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  21480. {
  21481. return *reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR*>( this );
  21482. }
  21483. operator VkCopyAccelerationStructureToMemoryInfoKHR &() VULKAN_HPP_NOEXCEPT
  21484. {
  21485. return *reinterpret_cast<VkCopyAccelerationStructureToMemoryInfoKHR*>( this );
  21486. }
  21487. public:
  21488. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyAccelerationStructureToMemoryInfoKHR;
  21489. const void* pNext = {};
  21490. VULKAN_HPP_NAMESPACE::AccelerationStructureKHR src = {};
  21491. VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR dst = {};
  21492. VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
  21493. };
  21494. static_assert( sizeof( CopyAccelerationStructureToMemoryInfoKHR ) == sizeof( VkCopyAccelerationStructureToMemoryInfoKHR ), "struct and wrapper have different size!" );
  21495. static_assert( std::is_standard_layout<CopyAccelerationStructureToMemoryInfoKHR>::value, "struct wrapper is not a standard layout!" );
  21496. template <>
  21497. struct CppType<StructureType, StructureType::eCopyAccelerationStructureToMemoryInfoKHR>
  21498. {
  21499. using Type = CopyAccelerationStructureToMemoryInfoKHR;
  21500. };
  21501. struct CopyBufferInfo2KHR
  21502. {
  21503. static const bool allowDuplicate = false;
  21504. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferInfo2KHR;
  21505. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21506. VULKAN_HPP_CONSTEXPR CopyBufferInfo2KHR(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
  21507. : srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
  21508. {}
  21509. VULKAN_HPP_CONSTEXPR CopyBufferInfo2KHR( CopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21510. CopyBufferInfo2KHR( VkCopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  21511. : CopyBufferInfo2KHR( *reinterpret_cast<CopyBufferInfo2KHR const *>( &rhs ) )
  21512. {}
  21513. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  21514. CopyBufferInfo2KHR( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2KHR> const & regions_ )
  21515. : srcBuffer( srcBuffer_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
  21516. {}
  21517. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  21518. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21519. VULKAN_HPP_CONSTEXPR_14 CopyBufferInfo2KHR & operator=( CopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21520. CopyBufferInfo2KHR & operator=( VkCopyBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  21521. {
  21522. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR const *>( &rhs );
  21523. return *this;
  21524. }
  21525. CopyBufferInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  21526. {
  21527. pNext = pNext_;
  21528. return *this;
  21529. }
  21530. CopyBufferInfo2KHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
  21531. {
  21532. srcBuffer = srcBuffer_;
  21533. return *this;
  21534. }
  21535. CopyBufferInfo2KHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
  21536. {
  21537. dstBuffer = dstBuffer_;
  21538. return *this;
  21539. }
  21540. CopyBufferInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
  21541. {
  21542. regionCount = regionCount_;
  21543. return *this;
  21544. }
  21545. CopyBufferInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::BufferCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
  21546. {
  21547. pRegions = pRegions_;
  21548. return *this;
  21549. }
  21550. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  21551. CopyBufferInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferCopy2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
  21552. {
  21553. regionCount = static_cast<uint32_t>( regions_.size() );
  21554. pRegions = regions_.data();
  21555. return *this;
  21556. }
  21557. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  21558. operator VkCopyBufferInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
  21559. {
  21560. return *reinterpret_cast<const VkCopyBufferInfo2KHR*>( this );
  21561. }
  21562. operator VkCopyBufferInfo2KHR &() VULKAN_HPP_NOEXCEPT
  21563. {
  21564. return *reinterpret_cast<VkCopyBufferInfo2KHR*>( this );
  21565. }
  21566. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  21567. auto operator<=>( CopyBufferInfo2KHR const& ) const = default;
  21568. #else
  21569. bool operator==( CopyBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  21570. {
  21571. return ( sType == rhs.sType )
  21572. && ( pNext == rhs.pNext )
  21573. && ( srcBuffer == rhs.srcBuffer )
  21574. && ( dstBuffer == rhs.dstBuffer )
  21575. && ( regionCount == rhs.regionCount )
  21576. && ( pRegions == rhs.pRegions );
  21577. }
  21578. bool operator!=( CopyBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  21579. {
  21580. return !operator==( rhs );
  21581. }
  21582. #endif
  21583. public:
  21584. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferInfo2KHR;
  21585. const void* pNext = {};
  21586. VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {};
  21587. VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {};
  21588. uint32_t regionCount = {};
  21589. const VULKAN_HPP_NAMESPACE::BufferCopy2KHR* pRegions = {};
  21590. };
  21591. static_assert( sizeof( CopyBufferInfo2KHR ) == sizeof( VkCopyBufferInfo2KHR ), "struct and wrapper have different size!" );
  21592. static_assert( std::is_standard_layout<CopyBufferInfo2KHR>::value, "struct wrapper is not a standard layout!" );
  21593. template <>
  21594. struct CppType<StructureType, StructureType::eCopyBufferInfo2KHR>
  21595. {
  21596. using Type = CopyBufferInfo2KHR;
  21597. };
  21598. struct CopyBufferToImageInfo2KHR
  21599. {
  21600. static const bool allowDuplicate = false;
  21601. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyBufferToImageInfo2KHR;
  21602. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21603. VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2KHR(VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ = {}, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
  21604. : srcBuffer( srcBuffer_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
  21605. {}
  21606. VULKAN_HPP_CONSTEXPR CopyBufferToImageInfo2KHR( CopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21607. CopyBufferToImageInfo2KHR( VkCopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  21608. : CopyBufferToImageInfo2KHR( *reinterpret_cast<CopyBufferToImageInfo2KHR const *>( &rhs ) )
  21609. {}
  21610. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  21611. CopyBufferToImageInfo2KHR( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ )
  21612. : srcBuffer( srcBuffer_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
  21613. {}
  21614. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  21615. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21616. VULKAN_HPP_CONSTEXPR_14 CopyBufferToImageInfo2KHR & operator=( CopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21617. CopyBufferToImageInfo2KHR & operator=( VkCopyBufferToImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  21618. {
  21619. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR const *>( &rhs );
  21620. return *this;
  21621. }
  21622. CopyBufferToImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  21623. {
  21624. pNext = pNext_;
  21625. return *this;
  21626. }
  21627. CopyBufferToImageInfo2KHR & setSrcBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer_ ) VULKAN_HPP_NOEXCEPT
  21628. {
  21629. srcBuffer = srcBuffer_;
  21630. return *this;
  21631. }
  21632. CopyBufferToImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
  21633. {
  21634. dstImage = dstImage_;
  21635. return *this;
  21636. }
  21637. CopyBufferToImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
  21638. {
  21639. dstImageLayout = dstImageLayout_;
  21640. return *this;
  21641. }
  21642. CopyBufferToImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
  21643. {
  21644. regionCount = regionCount_;
  21645. return *this;
  21646. }
  21647. CopyBufferToImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
  21648. {
  21649. pRegions = pRegions_;
  21650. return *this;
  21651. }
  21652. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  21653. CopyBufferToImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
  21654. {
  21655. regionCount = static_cast<uint32_t>( regions_.size() );
  21656. pRegions = regions_.data();
  21657. return *this;
  21658. }
  21659. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  21660. operator VkCopyBufferToImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
  21661. {
  21662. return *reinterpret_cast<const VkCopyBufferToImageInfo2KHR*>( this );
  21663. }
  21664. operator VkCopyBufferToImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
  21665. {
  21666. return *reinterpret_cast<VkCopyBufferToImageInfo2KHR*>( this );
  21667. }
  21668. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  21669. auto operator<=>( CopyBufferToImageInfo2KHR const& ) const = default;
  21670. #else
  21671. bool operator==( CopyBufferToImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  21672. {
  21673. return ( sType == rhs.sType )
  21674. && ( pNext == rhs.pNext )
  21675. && ( srcBuffer == rhs.srcBuffer )
  21676. && ( dstImage == rhs.dstImage )
  21677. && ( dstImageLayout == rhs.dstImageLayout )
  21678. && ( regionCount == rhs.regionCount )
  21679. && ( pRegions == rhs.pRegions );
  21680. }
  21681. bool operator!=( CopyBufferToImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  21682. {
  21683. return !operator==( rhs );
  21684. }
  21685. #endif
  21686. public:
  21687. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyBufferToImageInfo2KHR;
  21688. const void* pNext = {};
  21689. VULKAN_HPP_NAMESPACE::Buffer srcBuffer = {};
  21690. VULKAN_HPP_NAMESPACE::Image dstImage = {};
  21691. VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  21692. uint32_t regionCount = {};
  21693. const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions = {};
  21694. };
  21695. static_assert( sizeof( CopyBufferToImageInfo2KHR ) == sizeof( VkCopyBufferToImageInfo2KHR ), "struct and wrapper have different size!" );
  21696. static_assert( std::is_standard_layout<CopyBufferToImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );
  21697. template <>
  21698. struct CppType<StructureType, StructureType::eCopyBufferToImageInfo2KHR>
  21699. {
  21700. using Type = CopyBufferToImageInfo2KHR;
  21701. };
  21702. struct CopyCommandTransformInfoQCOM
  21703. {
  21704. static const bool allowDuplicate = false;
  21705. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyCommandTransformInfoQCOM;
  21706. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21707. VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity) VULKAN_HPP_NOEXCEPT
  21708. : transform( transform_ )
  21709. {}
  21710. VULKAN_HPP_CONSTEXPR CopyCommandTransformInfoQCOM( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21711. CopyCommandTransformInfoQCOM( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
  21712. : CopyCommandTransformInfoQCOM( *reinterpret_cast<CopyCommandTransformInfoQCOM const *>( &rhs ) )
  21713. {}
  21714. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21715. VULKAN_HPP_CONSTEXPR_14 CopyCommandTransformInfoQCOM & operator=( CopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21716. CopyCommandTransformInfoQCOM & operator=( VkCopyCommandTransformInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
  21717. {
  21718. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM const *>( &rhs );
  21719. return *this;
  21720. }
  21721. CopyCommandTransformInfoQCOM & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  21722. {
  21723. pNext = pNext_;
  21724. return *this;
  21725. }
  21726. CopyCommandTransformInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
  21727. {
  21728. transform = transform_;
  21729. return *this;
  21730. }
  21731. operator VkCopyCommandTransformInfoQCOM const&() const VULKAN_HPP_NOEXCEPT
  21732. {
  21733. return *reinterpret_cast<const VkCopyCommandTransformInfoQCOM*>( this );
  21734. }
  21735. operator VkCopyCommandTransformInfoQCOM &() VULKAN_HPP_NOEXCEPT
  21736. {
  21737. return *reinterpret_cast<VkCopyCommandTransformInfoQCOM*>( this );
  21738. }
  21739. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  21740. auto operator<=>( CopyCommandTransformInfoQCOM const& ) const = default;
  21741. #else
  21742. bool operator==( CopyCommandTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
  21743. {
  21744. return ( sType == rhs.sType )
  21745. && ( pNext == rhs.pNext )
  21746. && ( transform == rhs.transform );
  21747. }
  21748. bool operator!=( CopyCommandTransformInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
  21749. {
  21750. return !operator==( rhs );
  21751. }
  21752. #endif
  21753. public:
  21754. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyCommandTransformInfoQCOM;
  21755. const void* pNext = {};
  21756. VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
  21757. };
  21758. static_assert( sizeof( CopyCommandTransformInfoQCOM ) == sizeof( VkCopyCommandTransformInfoQCOM ), "struct and wrapper have different size!" );
  21759. static_assert( std::is_standard_layout<CopyCommandTransformInfoQCOM>::value, "struct wrapper is not a standard layout!" );
  21760. template <>
  21761. struct CppType<StructureType, StructureType::eCopyCommandTransformInfoQCOM>
  21762. {
  21763. using Type = CopyCommandTransformInfoQCOM;
  21764. };
  21765. class DescriptorSet
  21766. {
  21767. public:
  21768. using CType = VkDescriptorSet;
  21769. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet;
  21770. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet;
  21771. public:
  21772. VULKAN_HPP_CONSTEXPR DescriptorSet() VULKAN_HPP_NOEXCEPT
  21773. : m_descriptorSet(VK_NULL_HANDLE)
  21774. {}
  21775. VULKAN_HPP_CONSTEXPR DescriptorSet( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  21776. : m_descriptorSet(VK_NULL_HANDLE)
  21777. {}
  21778. VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSet( VkDescriptorSet descriptorSet ) VULKAN_HPP_NOEXCEPT
  21779. : m_descriptorSet( descriptorSet )
  21780. {}
  21781. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  21782. DescriptorSet & operator=(VkDescriptorSet descriptorSet) VULKAN_HPP_NOEXCEPT
  21783. {
  21784. m_descriptorSet = descriptorSet;
  21785. return *this;
  21786. }
  21787. #endif
  21788. DescriptorSet & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  21789. {
  21790. m_descriptorSet = VK_NULL_HANDLE;
  21791. return *this;
  21792. }
  21793. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  21794. auto operator<=>( DescriptorSet const& ) const = default;
  21795. #else
  21796. bool operator==( DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
  21797. {
  21798. return m_descriptorSet == rhs.m_descriptorSet;
  21799. }
  21800. bool operator!=(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
  21801. {
  21802. return m_descriptorSet != rhs.m_descriptorSet;
  21803. }
  21804. bool operator<(DescriptorSet const & rhs ) const VULKAN_HPP_NOEXCEPT
  21805. {
  21806. return m_descriptorSet < rhs.m_descriptorSet;
  21807. }
  21808. #endif
  21809. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSet() const VULKAN_HPP_NOEXCEPT
  21810. {
  21811. return m_descriptorSet;
  21812. }
  21813. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  21814. {
  21815. return m_descriptorSet != VK_NULL_HANDLE;
  21816. }
  21817. bool operator!() const VULKAN_HPP_NOEXCEPT
  21818. {
  21819. return m_descriptorSet == VK_NULL_HANDLE;
  21820. }
  21821. private:
  21822. VkDescriptorSet m_descriptorSet;
  21823. };
  21824. static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );
  21825. template <>
  21826. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorSet>
  21827. {
  21828. using type = VULKAN_HPP_NAMESPACE::DescriptorSet;
  21829. };
  21830. template <>
  21831. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSet>
  21832. {
  21833. using Type = VULKAN_HPP_NAMESPACE::DescriptorSet;
  21834. };
  21835. template <>
  21836. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSet>
  21837. {
  21838. using Type = VULKAN_HPP_NAMESPACE::DescriptorSet;
  21839. };
  21840. template <>
  21841. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorSet>
  21842. {
  21843. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  21844. };
  21845. struct CopyDescriptorSet
  21846. {
  21847. static const bool allowDuplicate = false;
  21848. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyDescriptorSet;
  21849. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21850. VULKAN_HPP_CONSTEXPR CopyDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ = {}, uint32_t srcBinding_ = {}, uint32_t srcArrayElement_ = {}, VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
  21851. : srcSet( srcSet_ ), srcBinding( srcBinding_ ), srcArrayElement( srcArrayElement_ ), dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ )
  21852. {}
  21853. VULKAN_HPP_CONSTEXPR CopyDescriptorSet( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21854. CopyDescriptorSet( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
  21855. : CopyDescriptorSet( *reinterpret_cast<CopyDescriptorSet const *>( &rhs ) )
  21856. {}
  21857. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21858. VULKAN_HPP_CONSTEXPR_14 CopyDescriptorSet & operator=( CopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21859. CopyDescriptorSet & operator=( VkCopyDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
  21860. {
  21861. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyDescriptorSet const *>( &rhs );
  21862. return *this;
  21863. }
  21864. CopyDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  21865. {
  21866. pNext = pNext_;
  21867. return *this;
  21868. }
  21869. CopyDescriptorSet & setSrcSet( VULKAN_HPP_NAMESPACE::DescriptorSet srcSet_ ) VULKAN_HPP_NOEXCEPT
  21870. {
  21871. srcSet = srcSet_;
  21872. return *this;
  21873. }
  21874. CopyDescriptorSet & setSrcBinding( uint32_t srcBinding_ ) VULKAN_HPP_NOEXCEPT
  21875. {
  21876. srcBinding = srcBinding_;
  21877. return *this;
  21878. }
  21879. CopyDescriptorSet & setSrcArrayElement( uint32_t srcArrayElement_ ) VULKAN_HPP_NOEXCEPT
  21880. {
  21881. srcArrayElement = srcArrayElement_;
  21882. return *this;
  21883. }
  21884. CopyDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
  21885. {
  21886. dstSet = dstSet_;
  21887. return *this;
  21888. }
  21889. CopyDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
  21890. {
  21891. dstBinding = dstBinding_;
  21892. return *this;
  21893. }
  21894. CopyDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
  21895. {
  21896. dstArrayElement = dstArrayElement_;
  21897. return *this;
  21898. }
  21899. CopyDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
  21900. {
  21901. descriptorCount = descriptorCount_;
  21902. return *this;
  21903. }
  21904. operator VkCopyDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
  21905. {
  21906. return *reinterpret_cast<const VkCopyDescriptorSet*>( this );
  21907. }
  21908. operator VkCopyDescriptorSet &() VULKAN_HPP_NOEXCEPT
  21909. {
  21910. return *reinterpret_cast<VkCopyDescriptorSet*>( this );
  21911. }
  21912. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  21913. auto operator<=>( CopyDescriptorSet const& ) const = default;
  21914. #else
  21915. bool operator==( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
  21916. {
  21917. return ( sType == rhs.sType )
  21918. && ( pNext == rhs.pNext )
  21919. && ( srcSet == rhs.srcSet )
  21920. && ( srcBinding == rhs.srcBinding )
  21921. && ( srcArrayElement == rhs.srcArrayElement )
  21922. && ( dstSet == rhs.dstSet )
  21923. && ( dstBinding == rhs.dstBinding )
  21924. && ( dstArrayElement == rhs.dstArrayElement )
  21925. && ( descriptorCount == rhs.descriptorCount );
  21926. }
  21927. bool operator!=( CopyDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
  21928. {
  21929. return !operator==( rhs );
  21930. }
  21931. #endif
  21932. public:
  21933. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyDescriptorSet;
  21934. const void* pNext = {};
  21935. VULKAN_HPP_NAMESPACE::DescriptorSet srcSet = {};
  21936. uint32_t srcBinding = {};
  21937. uint32_t srcArrayElement = {};
  21938. VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
  21939. uint32_t dstBinding = {};
  21940. uint32_t dstArrayElement = {};
  21941. uint32_t descriptorCount = {};
  21942. };
  21943. static_assert( sizeof( CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
  21944. static_assert( std::is_standard_layout<CopyDescriptorSet>::value, "struct wrapper is not a standard layout!" );
  21945. template <>
  21946. struct CppType<StructureType, StructureType::eCopyDescriptorSet>
  21947. {
  21948. using Type = CopyDescriptorSet;
  21949. };
  21950. struct ImageCopy2KHR
  21951. {
  21952. static const bool allowDuplicate = false;
  21953. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCopy2KHR;
  21954. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21955. VULKAN_HPP_CONSTEXPR ImageCopy2KHR(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
  21956. : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
  21957. {}
  21958. VULKAN_HPP_CONSTEXPR ImageCopy2KHR( ImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21959. ImageCopy2KHR( VkImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  21960. : ImageCopy2KHR( *reinterpret_cast<ImageCopy2KHR const *>( &rhs ) )
  21961. {}
  21962. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  21963. VULKAN_HPP_CONSTEXPR_14 ImageCopy2KHR & operator=( ImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  21964. ImageCopy2KHR & operator=( VkImageCopy2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  21965. {
  21966. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy2KHR const *>( &rhs );
  21967. return *this;
  21968. }
  21969. ImageCopy2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  21970. {
  21971. pNext = pNext_;
  21972. return *this;
  21973. }
  21974. ImageCopy2KHR & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
  21975. {
  21976. srcSubresource = srcSubresource_;
  21977. return *this;
  21978. }
  21979. ImageCopy2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
  21980. {
  21981. srcOffset = srcOffset_;
  21982. return *this;
  21983. }
  21984. ImageCopy2KHR & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
  21985. {
  21986. dstSubresource = dstSubresource_;
  21987. return *this;
  21988. }
  21989. ImageCopy2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
  21990. {
  21991. dstOffset = dstOffset_;
  21992. return *this;
  21993. }
  21994. ImageCopy2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
  21995. {
  21996. extent = extent_;
  21997. return *this;
  21998. }
  21999. operator VkImageCopy2KHR const&() const VULKAN_HPP_NOEXCEPT
  22000. {
  22001. return *reinterpret_cast<const VkImageCopy2KHR*>( this );
  22002. }
  22003. operator VkImageCopy2KHR &() VULKAN_HPP_NOEXCEPT
  22004. {
  22005. return *reinterpret_cast<VkImageCopy2KHR*>( this );
  22006. }
  22007. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  22008. auto operator<=>( ImageCopy2KHR const& ) const = default;
  22009. #else
  22010. bool operator==( ImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  22011. {
  22012. return ( sType == rhs.sType )
  22013. && ( pNext == rhs.pNext )
  22014. && ( srcSubresource == rhs.srcSubresource )
  22015. && ( srcOffset == rhs.srcOffset )
  22016. && ( dstSubresource == rhs.dstSubresource )
  22017. && ( dstOffset == rhs.dstOffset )
  22018. && ( extent == rhs.extent );
  22019. }
  22020. bool operator!=( ImageCopy2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  22021. {
  22022. return !operator==( rhs );
  22023. }
  22024. #endif
  22025. public:
  22026. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCopy2KHR;
  22027. const void* pNext = {};
  22028. VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
  22029. VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
  22030. VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
  22031. VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
  22032. VULKAN_HPP_NAMESPACE::Extent3D extent = {};
  22033. };
  22034. static_assert( sizeof( ImageCopy2KHR ) == sizeof( VkImageCopy2KHR ), "struct and wrapper have different size!" );
  22035. static_assert( std::is_standard_layout<ImageCopy2KHR>::value, "struct wrapper is not a standard layout!" );
  22036. template <>
  22037. struct CppType<StructureType, StructureType::eImageCopy2KHR>
  22038. {
  22039. using Type = ImageCopy2KHR;
  22040. };
  22041. struct CopyImageInfo2KHR
  22042. {
  22043. static const bool allowDuplicate = false;
  22044. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageInfo2KHR;
  22045. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22046. VULKAN_HPP_CONSTEXPR CopyImageInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
  22047. : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
  22048. {}
  22049. VULKAN_HPP_CONSTEXPR CopyImageInfo2KHR( CopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22050. CopyImageInfo2KHR( VkCopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  22051. : CopyImageInfo2KHR( *reinterpret_cast<CopyImageInfo2KHR const *>( &rhs ) )
  22052. {}
  22053. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22054. CopyImageInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2KHR> const & regions_ )
  22055. : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
  22056. {}
  22057. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22058. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22059. VULKAN_HPP_CONSTEXPR_14 CopyImageInfo2KHR & operator=( CopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22060. CopyImageInfo2KHR & operator=( VkCopyImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  22061. {
  22062. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR const *>( &rhs );
  22063. return *this;
  22064. }
  22065. CopyImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  22066. {
  22067. pNext = pNext_;
  22068. return *this;
  22069. }
  22070. CopyImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
  22071. {
  22072. srcImage = srcImage_;
  22073. return *this;
  22074. }
  22075. CopyImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
  22076. {
  22077. srcImageLayout = srcImageLayout_;
  22078. return *this;
  22079. }
  22080. CopyImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
  22081. {
  22082. dstImage = dstImage_;
  22083. return *this;
  22084. }
  22085. CopyImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
  22086. {
  22087. dstImageLayout = dstImageLayout_;
  22088. return *this;
  22089. }
  22090. CopyImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
  22091. {
  22092. regionCount = regionCount_;
  22093. return *this;
  22094. }
  22095. CopyImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
  22096. {
  22097. pRegions = pRegions_;
  22098. return *this;
  22099. }
  22100. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22101. CopyImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageCopy2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
  22102. {
  22103. regionCount = static_cast<uint32_t>( regions_.size() );
  22104. pRegions = regions_.data();
  22105. return *this;
  22106. }
  22107. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22108. operator VkCopyImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
  22109. {
  22110. return *reinterpret_cast<const VkCopyImageInfo2KHR*>( this );
  22111. }
  22112. operator VkCopyImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
  22113. {
  22114. return *reinterpret_cast<VkCopyImageInfo2KHR*>( this );
  22115. }
  22116. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  22117. auto operator<=>( CopyImageInfo2KHR const& ) const = default;
  22118. #else
  22119. bool operator==( CopyImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  22120. {
  22121. return ( sType == rhs.sType )
  22122. && ( pNext == rhs.pNext )
  22123. && ( srcImage == rhs.srcImage )
  22124. && ( srcImageLayout == rhs.srcImageLayout )
  22125. && ( dstImage == rhs.dstImage )
  22126. && ( dstImageLayout == rhs.dstImageLayout )
  22127. && ( regionCount == rhs.regionCount )
  22128. && ( pRegions == rhs.pRegions );
  22129. }
  22130. bool operator!=( CopyImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  22131. {
  22132. return !operator==( rhs );
  22133. }
  22134. #endif
  22135. public:
  22136. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageInfo2KHR;
  22137. const void* pNext = {};
  22138. VULKAN_HPP_NAMESPACE::Image srcImage = {};
  22139. VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  22140. VULKAN_HPP_NAMESPACE::Image dstImage = {};
  22141. VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  22142. uint32_t regionCount = {};
  22143. const VULKAN_HPP_NAMESPACE::ImageCopy2KHR* pRegions = {};
  22144. };
  22145. static_assert( sizeof( CopyImageInfo2KHR ) == sizeof( VkCopyImageInfo2KHR ), "struct and wrapper have different size!" );
  22146. static_assert( std::is_standard_layout<CopyImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );
  22147. template <>
  22148. struct CppType<StructureType, StructureType::eCopyImageInfo2KHR>
  22149. {
  22150. using Type = CopyImageInfo2KHR;
  22151. };
  22152. struct CopyImageToBufferInfo2KHR
  22153. {
  22154. static const bool allowDuplicate = false;
  22155. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyImageToBufferInfo2KHR;
  22156. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22157. VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ = {}, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
  22158. : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstBuffer( dstBuffer_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
  22159. {}
  22160. VULKAN_HPP_CONSTEXPR CopyImageToBufferInfo2KHR( CopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22161. CopyImageToBufferInfo2KHR( VkCopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  22162. : CopyImageToBufferInfo2KHR( *reinterpret_cast<CopyImageToBufferInfo2KHR const *>( &rhs ) )
  22163. {}
  22164. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22165. CopyImageToBufferInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Buffer dstBuffer_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ )
  22166. : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstBuffer( dstBuffer_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
  22167. {}
  22168. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22169. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22170. VULKAN_HPP_CONSTEXPR_14 CopyImageToBufferInfo2KHR & operator=( CopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22171. CopyImageToBufferInfo2KHR & operator=( VkCopyImageToBufferInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  22172. {
  22173. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR const *>( &rhs );
  22174. return *this;
  22175. }
  22176. CopyImageToBufferInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  22177. {
  22178. pNext = pNext_;
  22179. return *this;
  22180. }
  22181. CopyImageToBufferInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
  22182. {
  22183. srcImage = srcImage_;
  22184. return *this;
  22185. }
  22186. CopyImageToBufferInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
  22187. {
  22188. srcImageLayout = srcImageLayout_;
  22189. return *this;
  22190. }
  22191. CopyImageToBufferInfo2KHR & setDstBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer_ ) VULKAN_HPP_NOEXCEPT
  22192. {
  22193. dstBuffer = dstBuffer_;
  22194. return *this;
  22195. }
  22196. CopyImageToBufferInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
  22197. {
  22198. regionCount = regionCount_;
  22199. return *this;
  22200. }
  22201. CopyImageToBufferInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
  22202. {
  22203. pRegions = pRegions_;
  22204. return *this;
  22205. }
  22206. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22207. CopyImageToBufferInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
  22208. {
  22209. regionCount = static_cast<uint32_t>( regions_.size() );
  22210. pRegions = regions_.data();
  22211. return *this;
  22212. }
  22213. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22214. operator VkCopyImageToBufferInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
  22215. {
  22216. return *reinterpret_cast<const VkCopyImageToBufferInfo2KHR*>( this );
  22217. }
  22218. operator VkCopyImageToBufferInfo2KHR &() VULKAN_HPP_NOEXCEPT
  22219. {
  22220. return *reinterpret_cast<VkCopyImageToBufferInfo2KHR*>( this );
  22221. }
  22222. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  22223. auto operator<=>( CopyImageToBufferInfo2KHR const& ) const = default;
  22224. #else
  22225. bool operator==( CopyImageToBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  22226. {
  22227. return ( sType == rhs.sType )
  22228. && ( pNext == rhs.pNext )
  22229. && ( srcImage == rhs.srcImage )
  22230. && ( srcImageLayout == rhs.srcImageLayout )
  22231. && ( dstBuffer == rhs.dstBuffer )
  22232. && ( regionCount == rhs.regionCount )
  22233. && ( pRegions == rhs.pRegions );
  22234. }
  22235. bool operator!=( CopyImageToBufferInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  22236. {
  22237. return !operator==( rhs );
  22238. }
  22239. #endif
  22240. public:
  22241. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyImageToBufferInfo2KHR;
  22242. const void* pNext = {};
  22243. VULKAN_HPP_NAMESPACE::Image srcImage = {};
  22244. VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  22245. VULKAN_HPP_NAMESPACE::Buffer dstBuffer = {};
  22246. uint32_t regionCount = {};
  22247. const VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR* pRegions = {};
  22248. };
  22249. static_assert( sizeof( CopyImageToBufferInfo2KHR ) == sizeof( VkCopyImageToBufferInfo2KHR ), "struct and wrapper have different size!" );
  22250. static_assert( std::is_standard_layout<CopyImageToBufferInfo2KHR>::value, "struct wrapper is not a standard layout!" );
  22251. template <>
  22252. struct CppType<StructureType, StructureType::eCopyImageToBufferInfo2KHR>
  22253. {
  22254. using Type = CopyImageToBufferInfo2KHR;
  22255. };
  22256. struct CopyMemoryToAccelerationStructureInfoKHR
  22257. {
  22258. static const bool allowDuplicate = false;
  22259. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
  22260. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22261. CopyMemoryToAccelerationStructureInfoKHR(VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src_ = {}, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ = {}, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone) VULKAN_HPP_NOEXCEPT
  22262. : src( src_ ), dst( dst_ ), mode( mode_ )
  22263. {}
  22264. CopyMemoryToAccelerationStructureInfoKHR( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22265. CopyMemoryToAccelerationStructureInfoKHR( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  22266. : CopyMemoryToAccelerationStructureInfoKHR( *reinterpret_cast<CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs ) )
  22267. {}
  22268. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22269. CopyMemoryToAccelerationStructureInfoKHR & operator=( CopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22270. CopyMemoryToAccelerationStructureInfoKHR & operator=( VkCopyMemoryToAccelerationStructureInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  22271. {
  22272. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR const *>( &rhs );
  22273. return *this;
  22274. }
  22275. CopyMemoryToAccelerationStructureInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  22276. {
  22277. pNext = pNext_;
  22278. return *this;
  22279. }
  22280. CopyMemoryToAccelerationStructureInfoKHR & setSrc( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR const & src_ ) VULKAN_HPP_NOEXCEPT
  22281. {
  22282. src = src_;
  22283. return *this;
  22284. }
  22285. CopyMemoryToAccelerationStructureInfoKHR & setDst( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst_ ) VULKAN_HPP_NOEXCEPT
  22286. {
  22287. dst = dst_;
  22288. return *this;
  22289. }
  22290. CopyMemoryToAccelerationStructureInfoKHR & setMode( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
  22291. {
  22292. mode = mode_;
  22293. return *this;
  22294. }
  22295. operator VkCopyMemoryToAccelerationStructureInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  22296. {
  22297. return *reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR*>( this );
  22298. }
  22299. operator VkCopyMemoryToAccelerationStructureInfoKHR &() VULKAN_HPP_NOEXCEPT
  22300. {
  22301. return *reinterpret_cast<VkCopyMemoryToAccelerationStructureInfoKHR*>( this );
  22302. }
  22303. public:
  22304. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eCopyMemoryToAccelerationStructureInfoKHR;
  22305. const void* pNext = {};
  22306. VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR src = {};
  22307. VULKAN_HPP_NAMESPACE::AccelerationStructureKHR dst = {};
  22308. VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode = VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR::eClone;
  22309. };
  22310. static_assert( sizeof( CopyMemoryToAccelerationStructureInfoKHR ) == sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ), "struct and wrapper have different size!" );
  22311. static_assert( std::is_standard_layout<CopyMemoryToAccelerationStructureInfoKHR>::value, "struct wrapper is not a standard layout!" );
  22312. template <>
  22313. struct CppType<StructureType, StructureType::eCopyMemoryToAccelerationStructureInfoKHR>
  22314. {
  22315. using Type = CopyMemoryToAccelerationStructureInfoKHR;
  22316. };
  22317. #ifdef VK_USE_PLATFORM_WIN32_KHR
  22318. struct D3D12FenceSubmitInfoKHR
  22319. {
  22320. static const bool allowDuplicate = false;
  22321. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eD3D12FenceSubmitInfoKHR;
  22322. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22323. VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR(uint32_t waitSemaphoreValuesCount_ = {}, const uint64_t* pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValuesCount_ = {}, const uint64_t* pSignalSemaphoreValues_ = {}) VULKAN_HPP_NOEXCEPT
  22324. : waitSemaphoreValuesCount( waitSemaphoreValuesCount_ ), pWaitSemaphoreValues( pWaitSemaphoreValues_ ), signalSemaphoreValuesCount( signalSemaphoreValuesCount_ ), pSignalSemaphoreValues( pSignalSemaphoreValues_ )
  22325. {}
  22326. VULKAN_HPP_CONSTEXPR D3D12FenceSubmitInfoKHR( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22327. D3D12FenceSubmitInfoKHR( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  22328. : D3D12FenceSubmitInfoKHR( *reinterpret_cast<D3D12FenceSubmitInfoKHR const *>( &rhs ) )
  22329. {}
  22330. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22331. D3D12FenceSubmitInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {} )
  22332. : waitSemaphoreValuesCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) ), pWaitSemaphoreValues( waitSemaphoreValues_.data() ), signalSemaphoreValuesCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) ), pSignalSemaphoreValues( signalSemaphoreValues_.data() )
  22333. {}
  22334. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22335. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22336. VULKAN_HPP_CONSTEXPR_14 D3D12FenceSubmitInfoKHR & operator=( D3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22337. D3D12FenceSubmitInfoKHR & operator=( VkD3D12FenceSubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  22338. {
  22339. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR const *>( &rhs );
  22340. return *this;
  22341. }
  22342. D3D12FenceSubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  22343. {
  22344. pNext = pNext_;
  22345. return *this;
  22346. }
  22347. D3D12FenceSubmitInfoKHR & setWaitSemaphoreValuesCount( uint32_t waitSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
  22348. {
  22349. waitSemaphoreValuesCount = waitSemaphoreValuesCount_;
  22350. return *this;
  22351. }
  22352. D3D12FenceSubmitInfoKHR & setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
  22353. {
  22354. pWaitSemaphoreValues = pWaitSemaphoreValues_;
  22355. return *this;
  22356. }
  22357. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22358. D3D12FenceSubmitInfoKHR & setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
  22359. {
  22360. waitSemaphoreValuesCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
  22361. pWaitSemaphoreValues = waitSemaphoreValues_.data();
  22362. return *this;
  22363. }
  22364. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22365. D3D12FenceSubmitInfoKHR & setSignalSemaphoreValuesCount( uint32_t signalSemaphoreValuesCount_ ) VULKAN_HPP_NOEXCEPT
  22366. {
  22367. signalSemaphoreValuesCount = signalSemaphoreValuesCount_;
  22368. return *this;
  22369. }
  22370. D3D12FenceSubmitInfoKHR & setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
  22371. {
  22372. pSignalSemaphoreValues = pSignalSemaphoreValues_;
  22373. return *this;
  22374. }
  22375. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22376. D3D12FenceSubmitInfoKHR & setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
  22377. {
  22378. signalSemaphoreValuesCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
  22379. pSignalSemaphoreValues = signalSemaphoreValues_.data();
  22380. return *this;
  22381. }
  22382. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22383. operator VkD3D12FenceSubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  22384. {
  22385. return *reinterpret_cast<const VkD3D12FenceSubmitInfoKHR*>( this );
  22386. }
  22387. operator VkD3D12FenceSubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
  22388. {
  22389. return *reinterpret_cast<VkD3D12FenceSubmitInfoKHR*>( this );
  22390. }
  22391. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  22392. auto operator<=>( D3D12FenceSubmitInfoKHR const& ) const = default;
  22393. #else
  22394. bool operator==( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  22395. {
  22396. return ( sType == rhs.sType )
  22397. && ( pNext == rhs.pNext )
  22398. && ( waitSemaphoreValuesCount == rhs.waitSemaphoreValuesCount )
  22399. && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
  22400. && ( signalSemaphoreValuesCount == rhs.signalSemaphoreValuesCount )
  22401. && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
  22402. }
  22403. bool operator!=( D3D12FenceSubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  22404. {
  22405. return !operator==( rhs );
  22406. }
  22407. #endif
  22408. public:
  22409. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eD3D12FenceSubmitInfoKHR;
  22410. const void* pNext = {};
  22411. uint32_t waitSemaphoreValuesCount = {};
  22412. const uint64_t* pWaitSemaphoreValues = {};
  22413. uint32_t signalSemaphoreValuesCount = {};
  22414. const uint64_t* pSignalSemaphoreValues = {};
  22415. };
  22416. static_assert( sizeof( D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ), "struct and wrapper have different size!" );
  22417. static_assert( std::is_standard_layout<D3D12FenceSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
  22418. template <>
  22419. struct CppType<StructureType, StructureType::eD3D12FenceSubmitInfoKHR>
  22420. {
  22421. using Type = D3D12FenceSubmitInfoKHR;
  22422. };
  22423. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  22424. struct DebugMarkerMarkerInfoEXT
  22425. {
  22426. static const bool allowDuplicate = false;
  22427. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerMarkerInfoEXT;
  22428. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22429. VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT(const char* pMarkerName_ = {}, std::array<float,4> const& color_ = {}) VULKAN_HPP_NOEXCEPT
  22430. : pMarkerName( pMarkerName_ ), color( color_ )
  22431. {}
  22432. VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22433. DebugMarkerMarkerInfoEXT( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  22434. : DebugMarkerMarkerInfoEXT( *reinterpret_cast<DebugMarkerMarkerInfoEXT const *>( &rhs ) )
  22435. {}
  22436. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22437. VULKAN_HPP_CONSTEXPR_14 DebugMarkerMarkerInfoEXT & operator=( DebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22438. DebugMarkerMarkerInfoEXT & operator=( VkDebugMarkerMarkerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  22439. {
  22440. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT const *>( &rhs );
  22441. return *this;
  22442. }
  22443. DebugMarkerMarkerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  22444. {
  22445. pNext = pNext_;
  22446. return *this;
  22447. }
  22448. DebugMarkerMarkerInfoEXT & setPMarkerName( const char* pMarkerName_ ) VULKAN_HPP_NOEXCEPT
  22449. {
  22450. pMarkerName = pMarkerName_;
  22451. return *this;
  22452. }
  22453. DebugMarkerMarkerInfoEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
  22454. {
  22455. color = color_;
  22456. return *this;
  22457. }
  22458. operator VkDebugMarkerMarkerInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  22459. {
  22460. return *reinterpret_cast<const VkDebugMarkerMarkerInfoEXT*>( this );
  22461. }
  22462. operator VkDebugMarkerMarkerInfoEXT &() VULKAN_HPP_NOEXCEPT
  22463. {
  22464. return *reinterpret_cast<VkDebugMarkerMarkerInfoEXT*>( this );
  22465. }
  22466. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  22467. auto operator<=>( DebugMarkerMarkerInfoEXT const& ) const = default;
  22468. #else
  22469. bool operator==( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  22470. {
  22471. return ( sType == rhs.sType )
  22472. && ( pNext == rhs.pNext )
  22473. && ( pMarkerName == rhs.pMarkerName )
  22474. && ( color == rhs.color );
  22475. }
  22476. bool operator!=( DebugMarkerMarkerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  22477. {
  22478. return !operator==( rhs );
  22479. }
  22480. #endif
  22481. public:
  22482. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerMarkerInfoEXT;
  22483. const void* pNext = {};
  22484. const char* pMarkerName = {};
  22485. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
  22486. };
  22487. static_assert( sizeof( DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ), "struct and wrapper have different size!" );
  22488. static_assert( std::is_standard_layout<DebugMarkerMarkerInfoEXT>::value, "struct wrapper is not a standard layout!" );
  22489. template <>
  22490. struct CppType<StructureType, StructureType::eDebugMarkerMarkerInfoEXT>
  22491. {
  22492. using Type = DebugMarkerMarkerInfoEXT;
  22493. };
  22494. struct DebugMarkerObjectNameInfoEXT
  22495. {
  22496. static const bool allowDuplicate = false;
  22497. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectNameInfoEXT;
  22498. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22499. VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, const char* pObjectName_ = {}) VULKAN_HPP_NOEXCEPT
  22500. : objectType( objectType_ ), object( object_ ), pObjectName( pObjectName_ )
  22501. {}
  22502. VULKAN_HPP_CONSTEXPR DebugMarkerObjectNameInfoEXT( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22503. DebugMarkerObjectNameInfoEXT( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  22504. : DebugMarkerObjectNameInfoEXT( *reinterpret_cast<DebugMarkerObjectNameInfoEXT const *>( &rhs ) )
  22505. {}
  22506. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22507. VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectNameInfoEXT & operator=( DebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22508. DebugMarkerObjectNameInfoEXT & operator=( VkDebugMarkerObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  22509. {
  22510. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT const *>( &rhs );
  22511. return *this;
  22512. }
  22513. DebugMarkerObjectNameInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  22514. {
  22515. pNext = pNext_;
  22516. return *this;
  22517. }
  22518. DebugMarkerObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
  22519. {
  22520. objectType = objectType_;
  22521. return *this;
  22522. }
  22523. DebugMarkerObjectNameInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
  22524. {
  22525. object = object_;
  22526. return *this;
  22527. }
  22528. DebugMarkerObjectNameInfoEXT & setPObjectName( const char* pObjectName_ ) VULKAN_HPP_NOEXCEPT
  22529. {
  22530. pObjectName = pObjectName_;
  22531. return *this;
  22532. }
  22533. operator VkDebugMarkerObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  22534. {
  22535. return *reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT*>( this );
  22536. }
  22537. operator VkDebugMarkerObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
  22538. {
  22539. return *reinterpret_cast<VkDebugMarkerObjectNameInfoEXT*>( this );
  22540. }
  22541. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  22542. auto operator<=>( DebugMarkerObjectNameInfoEXT const& ) const = default;
  22543. #else
  22544. bool operator==( DebugMarkerObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  22545. {
  22546. return ( sType == rhs.sType )
  22547. && ( pNext == rhs.pNext )
  22548. && ( objectType == rhs.objectType )
  22549. && ( object == rhs.object )
  22550. && ( pObjectName == rhs.pObjectName );
  22551. }
  22552. bool operator!=( DebugMarkerObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  22553. {
  22554. return !operator==( rhs );
  22555. }
  22556. #endif
  22557. public:
  22558. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectNameInfoEXT;
  22559. const void* pNext = {};
  22560. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
  22561. uint64_t object = {};
  22562. const char* pObjectName = {};
  22563. };
  22564. static_assert( sizeof( DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ), "struct and wrapper have different size!" );
  22565. static_assert( std::is_standard_layout<DebugMarkerObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
  22566. template <>
  22567. struct CppType<StructureType, StructureType::eDebugMarkerObjectNameInfoEXT>
  22568. {
  22569. using Type = DebugMarkerObjectNameInfoEXT;
  22570. };
  22571. struct DebugMarkerObjectTagInfoEXT
  22572. {
  22573. static const bool allowDuplicate = false;
  22574. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugMarkerObjectTagInfoEXT;
  22575. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22576. VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown, uint64_t object_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void* pTag_ = {}) VULKAN_HPP_NOEXCEPT
  22577. : objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tagSize_ ), pTag( pTag_ )
  22578. {}
  22579. VULKAN_HPP_CONSTEXPR DebugMarkerObjectTagInfoEXT( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22580. DebugMarkerObjectTagInfoEXT( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  22581. : DebugMarkerObjectTagInfoEXT( *reinterpret_cast<DebugMarkerObjectTagInfoEXT const *>( &rhs ) )
  22582. {}
  22583. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22584. template <typename T>
  22585. DebugMarkerObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_, uint64_t object_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ )
  22586. : objectType( objectType_ ), object( object_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof(T) ), pTag( tag_.data() )
  22587. {}
  22588. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22589. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22590. VULKAN_HPP_CONSTEXPR_14 DebugMarkerObjectTagInfoEXT & operator=( DebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22591. DebugMarkerObjectTagInfoEXT & operator=( VkDebugMarkerObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  22592. {
  22593. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT const *>( &rhs );
  22594. return *this;
  22595. }
  22596. DebugMarkerObjectTagInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  22597. {
  22598. pNext = pNext_;
  22599. return *this;
  22600. }
  22601. DebugMarkerObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType_ ) VULKAN_HPP_NOEXCEPT
  22602. {
  22603. objectType = objectType_;
  22604. return *this;
  22605. }
  22606. DebugMarkerObjectTagInfoEXT & setObject( uint64_t object_ ) VULKAN_HPP_NOEXCEPT
  22607. {
  22608. object = object_;
  22609. return *this;
  22610. }
  22611. DebugMarkerObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
  22612. {
  22613. tagName = tagName_;
  22614. return *this;
  22615. }
  22616. DebugMarkerObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
  22617. {
  22618. tagSize = tagSize_;
  22619. return *this;
  22620. }
  22621. DebugMarkerObjectTagInfoEXT & setPTag( const void* pTag_ ) VULKAN_HPP_NOEXCEPT
  22622. {
  22623. pTag = pTag_;
  22624. return *this;
  22625. }
  22626. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22627. template <typename T>
  22628. DebugMarkerObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
  22629. {
  22630. tagSize = tag_.size() * sizeof(T);
  22631. pTag = tag_.data();
  22632. return *this;
  22633. }
  22634. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22635. operator VkDebugMarkerObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  22636. {
  22637. return *reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT*>( this );
  22638. }
  22639. operator VkDebugMarkerObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
  22640. {
  22641. return *reinterpret_cast<VkDebugMarkerObjectTagInfoEXT*>( this );
  22642. }
  22643. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  22644. auto operator<=>( DebugMarkerObjectTagInfoEXT const& ) const = default;
  22645. #else
  22646. bool operator==( DebugMarkerObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  22647. {
  22648. return ( sType == rhs.sType )
  22649. && ( pNext == rhs.pNext )
  22650. && ( objectType == rhs.objectType )
  22651. && ( object == rhs.object )
  22652. && ( tagName == rhs.tagName )
  22653. && ( tagSize == rhs.tagSize )
  22654. && ( pTag == rhs.pTag );
  22655. }
  22656. bool operator!=( DebugMarkerObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  22657. {
  22658. return !operator==( rhs );
  22659. }
  22660. #endif
  22661. public:
  22662. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugMarkerObjectTagInfoEXT;
  22663. const void* pNext = {};
  22664. VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
  22665. uint64_t object = {};
  22666. uint64_t tagName = {};
  22667. size_t tagSize = {};
  22668. const void* pTag = {};
  22669. };
  22670. static_assert( sizeof( DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ), "struct and wrapper have different size!" );
  22671. static_assert( std::is_standard_layout<DebugMarkerObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
  22672. template <>
  22673. struct CppType<StructureType, StructureType::eDebugMarkerObjectTagInfoEXT>
  22674. {
  22675. using Type = DebugMarkerObjectTagInfoEXT;
  22676. };
  22677. struct DebugReportCallbackCreateInfoEXT
  22678. {
  22679. static const bool allowDuplicate = false;
  22680. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugReportCallbackCreateInfoEXT;
  22681. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22682. VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ = {}, PFN_vkDebugReportCallbackEXT pfnCallback_ = {}, void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT
  22683. : flags( flags_ ), pfnCallback( pfnCallback_ ), pUserData( pUserData_ )
  22684. {}
  22685. VULKAN_HPP_CONSTEXPR DebugReportCallbackCreateInfoEXT( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22686. DebugReportCallbackCreateInfoEXT( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  22687. : DebugReportCallbackCreateInfoEXT( *reinterpret_cast<DebugReportCallbackCreateInfoEXT const *>( &rhs ) )
  22688. {}
  22689. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22690. VULKAN_HPP_CONSTEXPR_14 DebugReportCallbackCreateInfoEXT & operator=( DebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22691. DebugReportCallbackCreateInfoEXT & operator=( VkDebugReportCallbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  22692. {
  22693. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT const *>( &rhs );
  22694. return *this;
  22695. }
  22696. DebugReportCallbackCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  22697. {
  22698. pNext = pNext_;
  22699. return *this;
  22700. }
  22701. DebugReportCallbackCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
  22702. {
  22703. flags = flags_;
  22704. return *this;
  22705. }
  22706. DebugReportCallbackCreateInfoEXT & setPfnCallback( PFN_vkDebugReportCallbackEXT pfnCallback_ ) VULKAN_HPP_NOEXCEPT
  22707. {
  22708. pfnCallback = pfnCallback_;
  22709. return *this;
  22710. }
  22711. DebugReportCallbackCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
  22712. {
  22713. pUserData = pUserData_;
  22714. return *this;
  22715. }
  22716. operator VkDebugReportCallbackCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  22717. {
  22718. return *reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT*>( this );
  22719. }
  22720. operator VkDebugReportCallbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  22721. {
  22722. return *reinterpret_cast<VkDebugReportCallbackCreateInfoEXT*>( this );
  22723. }
  22724. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  22725. auto operator<=>( DebugReportCallbackCreateInfoEXT const& ) const = default;
  22726. #else
  22727. bool operator==( DebugReportCallbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  22728. {
  22729. return ( sType == rhs.sType )
  22730. && ( pNext == rhs.pNext )
  22731. && ( flags == rhs.flags )
  22732. && ( pfnCallback == rhs.pfnCallback )
  22733. && ( pUserData == rhs.pUserData );
  22734. }
  22735. bool operator!=( DebugReportCallbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  22736. {
  22737. return !operator==( rhs );
  22738. }
  22739. #endif
  22740. public:
  22741. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugReportCallbackCreateInfoEXT;
  22742. const void* pNext = {};
  22743. VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags = {};
  22744. PFN_vkDebugReportCallbackEXT pfnCallback = {};
  22745. void* pUserData = {};
  22746. };
  22747. static_assert( sizeof( DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ), "struct and wrapper have different size!" );
  22748. static_assert( std::is_standard_layout<DebugReportCallbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  22749. template <>
  22750. struct CppType<StructureType, StructureType::eDebugReportCallbackCreateInfoEXT>
  22751. {
  22752. using Type = DebugReportCallbackCreateInfoEXT;
  22753. };
  22754. struct DebugUtilsLabelEXT
  22755. {
  22756. static const bool allowDuplicate = false;
  22757. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsLabelEXT;
  22758. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22759. VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT(const char* pLabelName_ = {}, std::array<float,4> const& color_ = {}) VULKAN_HPP_NOEXCEPT
  22760. : pLabelName( pLabelName_ ), color( color_ )
  22761. {}
  22762. VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22763. DebugUtilsLabelEXT( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  22764. : DebugUtilsLabelEXT( *reinterpret_cast<DebugUtilsLabelEXT const *>( &rhs ) )
  22765. {}
  22766. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22767. VULKAN_HPP_CONSTEXPR_14 DebugUtilsLabelEXT & operator=( DebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22768. DebugUtilsLabelEXT & operator=( VkDebugUtilsLabelEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  22769. {
  22770. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT const *>( &rhs );
  22771. return *this;
  22772. }
  22773. DebugUtilsLabelEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  22774. {
  22775. pNext = pNext_;
  22776. return *this;
  22777. }
  22778. DebugUtilsLabelEXT & setPLabelName( const char* pLabelName_ ) VULKAN_HPP_NOEXCEPT
  22779. {
  22780. pLabelName = pLabelName_;
  22781. return *this;
  22782. }
  22783. DebugUtilsLabelEXT & setColor( std::array<float,4> color_ ) VULKAN_HPP_NOEXCEPT
  22784. {
  22785. color = color_;
  22786. return *this;
  22787. }
  22788. operator VkDebugUtilsLabelEXT const&() const VULKAN_HPP_NOEXCEPT
  22789. {
  22790. return *reinterpret_cast<const VkDebugUtilsLabelEXT*>( this );
  22791. }
  22792. operator VkDebugUtilsLabelEXT &() VULKAN_HPP_NOEXCEPT
  22793. {
  22794. return *reinterpret_cast<VkDebugUtilsLabelEXT*>( this );
  22795. }
  22796. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  22797. auto operator<=>( DebugUtilsLabelEXT const& ) const = default;
  22798. #else
  22799. bool operator==( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  22800. {
  22801. return ( sType == rhs.sType )
  22802. && ( pNext == rhs.pNext )
  22803. && ( pLabelName == rhs.pLabelName )
  22804. && ( color == rhs.color );
  22805. }
  22806. bool operator!=( DebugUtilsLabelEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  22807. {
  22808. return !operator==( rhs );
  22809. }
  22810. #endif
  22811. public:
  22812. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsLabelEXT;
  22813. const void* pNext = {};
  22814. const char* pLabelName = {};
  22815. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> color = {};
  22816. };
  22817. static_assert( sizeof( DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" );
  22818. static_assert( std::is_standard_layout<DebugUtilsLabelEXT>::value, "struct wrapper is not a standard layout!" );
  22819. template <>
  22820. struct CppType<StructureType, StructureType::eDebugUtilsLabelEXT>
  22821. {
  22822. using Type = DebugUtilsLabelEXT;
  22823. };
  22824. struct DebugUtilsObjectNameInfoEXT
  22825. {
  22826. static const bool allowDuplicate = false;
  22827. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectNameInfoEXT;
  22828. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22829. VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, const char* pObjectName_ = {}) VULKAN_HPP_NOEXCEPT
  22830. : objectType( objectType_ ), objectHandle( objectHandle_ ), pObjectName( pObjectName_ )
  22831. {}
  22832. VULKAN_HPP_CONSTEXPR DebugUtilsObjectNameInfoEXT( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22833. DebugUtilsObjectNameInfoEXT( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  22834. : DebugUtilsObjectNameInfoEXT( *reinterpret_cast<DebugUtilsObjectNameInfoEXT const *>( &rhs ) )
  22835. {}
  22836. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22837. VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectNameInfoEXT & operator=( DebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22838. DebugUtilsObjectNameInfoEXT & operator=( VkDebugUtilsObjectNameInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  22839. {
  22840. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT const *>( &rhs );
  22841. return *this;
  22842. }
  22843. DebugUtilsObjectNameInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  22844. {
  22845. pNext = pNext_;
  22846. return *this;
  22847. }
  22848. DebugUtilsObjectNameInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
  22849. {
  22850. objectType = objectType_;
  22851. return *this;
  22852. }
  22853. DebugUtilsObjectNameInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
  22854. {
  22855. objectHandle = objectHandle_;
  22856. return *this;
  22857. }
  22858. DebugUtilsObjectNameInfoEXT & setPObjectName( const char* pObjectName_ ) VULKAN_HPP_NOEXCEPT
  22859. {
  22860. pObjectName = pObjectName_;
  22861. return *this;
  22862. }
  22863. operator VkDebugUtilsObjectNameInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  22864. {
  22865. return *reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT*>( this );
  22866. }
  22867. operator VkDebugUtilsObjectNameInfoEXT &() VULKAN_HPP_NOEXCEPT
  22868. {
  22869. return *reinterpret_cast<VkDebugUtilsObjectNameInfoEXT*>( this );
  22870. }
  22871. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  22872. auto operator<=>( DebugUtilsObjectNameInfoEXT const& ) const = default;
  22873. #else
  22874. bool operator==( DebugUtilsObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  22875. {
  22876. return ( sType == rhs.sType )
  22877. && ( pNext == rhs.pNext )
  22878. && ( objectType == rhs.objectType )
  22879. && ( objectHandle == rhs.objectHandle )
  22880. && ( pObjectName == rhs.pObjectName );
  22881. }
  22882. bool operator!=( DebugUtilsObjectNameInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  22883. {
  22884. return !operator==( rhs );
  22885. }
  22886. #endif
  22887. public:
  22888. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectNameInfoEXT;
  22889. const void* pNext = {};
  22890. VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
  22891. uint64_t objectHandle = {};
  22892. const char* pObjectName = {};
  22893. };
  22894. static_assert( sizeof( DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ), "struct and wrapper have different size!" );
  22895. static_assert( std::is_standard_layout<DebugUtilsObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
  22896. template <>
  22897. struct CppType<StructureType, StructureType::eDebugUtilsObjectNameInfoEXT>
  22898. {
  22899. using Type = DebugUtilsObjectNameInfoEXT;
  22900. };
  22901. struct DebugUtilsMessengerCallbackDataEXT
  22902. {
  22903. static const bool allowDuplicate = false;
  22904. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
  22905. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22906. VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ = {}, const char* pMessageIdName_ = {}, int32_t messageIdNumber_ = {}, const char* pMessage_ = {}, uint32_t queueLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ = {}, uint32_t cmdBufLabelCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ = {}, uint32_t objectCount_ = {}, const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ = {}) VULKAN_HPP_NOEXCEPT
  22907. : flags( flags_ ), pMessageIdName( pMessageIdName_ ), messageIdNumber( messageIdNumber_ ), pMessage( pMessage_ ), queueLabelCount( queueLabelCount_ ), pQueueLabels( pQueueLabels_ ), cmdBufLabelCount( cmdBufLabelCount_ ), pCmdBufLabels( pCmdBufLabels_ ), objectCount( objectCount_ ), pObjects( pObjects_ )
  22908. {}
  22909. VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22910. DebugUtilsMessengerCallbackDataEXT( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  22911. : DebugUtilsMessengerCallbackDataEXT( *reinterpret_cast<DebugUtilsMessengerCallbackDataEXT const *>( &rhs ) )
  22912. {}
  22913. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22914. DebugUtilsMessengerCallbackDataEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_, const char* pMessageIdName_, int32_t messageIdNumber_, const char* pMessage_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ = {} )
  22915. : flags( flags_ ), pMessageIdName( pMessageIdName_ ), messageIdNumber( messageIdNumber_ ), pMessage( pMessage_ ), queueLabelCount( static_cast<uint32_t>( queueLabels_.size() ) ), pQueueLabels( queueLabels_.data() ), cmdBufLabelCount( static_cast<uint32_t>( cmdBufLabels_.size() ) ), pCmdBufLabels( cmdBufLabels_.data() ), objectCount( static_cast<uint32_t>( objects_.size() ) ), pObjects( objects_.data() )
  22916. {}
  22917. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22918. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  22919. VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCallbackDataEXT & operator=( DebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  22920. DebugUtilsMessengerCallbackDataEXT & operator=( VkDebugUtilsMessengerCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  22921. {
  22922. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT const *>( &rhs );
  22923. return *this;
  22924. }
  22925. DebugUtilsMessengerCallbackDataEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  22926. {
  22927. pNext = pNext_;
  22928. return *this;
  22929. }
  22930. DebugUtilsMessengerCallbackDataEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
  22931. {
  22932. flags = flags_;
  22933. return *this;
  22934. }
  22935. DebugUtilsMessengerCallbackDataEXT & setPMessageIdName( const char* pMessageIdName_ ) VULKAN_HPP_NOEXCEPT
  22936. {
  22937. pMessageIdName = pMessageIdName_;
  22938. return *this;
  22939. }
  22940. DebugUtilsMessengerCallbackDataEXT & setMessageIdNumber( int32_t messageIdNumber_ ) VULKAN_HPP_NOEXCEPT
  22941. {
  22942. messageIdNumber = messageIdNumber_;
  22943. return *this;
  22944. }
  22945. DebugUtilsMessengerCallbackDataEXT & setPMessage( const char* pMessage_ ) VULKAN_HPP_NOEXCEPT
  22946. {
  22947. pMessage = pMessage_;
  22948. return *this;
  22949. }
  22950. DebugUtilsMessengerCallbackDataEXT & setQueueLabelCount( uint32_t queueLabelCount_ ) VULKAN_HPP_NOEXCEPT
  22951. {
  22952. queueLabelCount = queueLabelCount_;
  22953. return *this;
  22954. }
  22955. DebugUtilsMessengerCallbackDataEXT & setPQueueLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels_ ) VULKAN_HPP_NOEXCEPT
  22956. {
  22957. pQueueLabels = pQueueLabels_;
  22958. return *this;
  22959. }
  22960. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22961. DebugUtilsMessengerCallbackDataEXT & setQueueLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & queueLabels_ ) VULKAN_HPP_NOEXCEPT
  22962. {
  22963. queueLabelCount = static_cast<uint32_t>( queueLabels_.size() );
  22964. pQueueLabels = queueLabels_.data();
  22965. return *this;
  22966. }
  22967. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22968. DebugUtilsMessengerCallbackDataEXT & setCmdBufLabelCount( uint32_t cmdBufLabelCount_ ) VULKAN_HPP_NOEXCEPT
  22969. {
  22970. cmdBufLabelCount = cmdBufLabelCount_;
  22971. return *this;
  22972. }
  22973. DebugUtilsMessengerCallbackDataEXT & setPCmdBufLabels( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
  22974. {
  22975. pCmdBufLabels = pCmdBufLabels_;
  22976. return *this;
  22977. }
  22978. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22979. DebugUtilsMessengerCallbackDataEXT & setCmdBufLabels( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT> const & cmdBufLabels_ ) VULKAN_HPP_NOEXCEPT
  22980. {
  22981. cmdBufLabelCount = static_cast<uint32_t>( cmdBufLabels_.size() );
  22982. pCmdBufLabels = cmdBufLabels_.data();
  22983. return *this;
  22984. }
  22985. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22986. DebugUtilsMessengerCallbackDataEXT & setObjectCount( uint32_t objectCount_ ) VULKAN_HPP_NOEXCEPT
  22987. {
  22988. objectCount = objectCount_;
  22989. return *this;
  22990. }
  22991. DebugUtilsMessengerCallbackDataEXT & setPObjects( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects_ ) VULKAN_HPP_NOEXCEPT
  22992. {
  22993. pObjects = pObjects_;
  22994. return *this;
  22995. }
  22996. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  22997. DebugUtilsMessengerCallbackDataEXT & setObjects( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT> const & objects_ ) VULKAN_HPP_NOEXCEPT
  22998. {
  22999. objectCount = static_cast<uint32_t>( objects_.size() );
  23000. pObjects = objects_.data();
  23001. return *this;
  23002. }
  23003. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  23004. operator VkDebugUtilsMessengerCallbackDataEXT const&() const VULKAN_HPP_NOEXCEPT
  23005. {
  23006. return *reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT*>( this );
  23007. }
  23008. operator VkDebugUtilsMessengerCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
  23009. {
  23010. return *reinterpret_cast<VkDebugUtilsMessengerCallbackDataEXT*>( this );
  23011. }
  23012. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  23013. auto operator<=>( DebugUtilsMessengerCallbackDataEXT const& ) const = default;
  23014. #else
  23015. bool operator==( DebugUtilsMessengerCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  23016. {
  23017. return ( sType == rhs.sType )
  23018. && ( pNext == rhs.pNext )
  23019. && ( flags == rhs.flags )
  23020. && ( pMessageIdName == rhs.pMessageIdName )
  23021. && ( messageIdNumber == rhs.messageIdNumber )
  23022. && ( pMessage == rhs.pMessage )
  23023. && ( queueLabelCount == rhs.queueLabelCount )
  23024. && ( pQueueLabels == rhs.pQueueLabels )
  23025. && ( cmdBufLabelCount == rhs.cmdBufLabelCount )
  23026. && ( pCmdBufLabels == rhs.pCmdBufLabels )
  23027. && ( objectCount == rhs.objectCount )
  23028. && ( pObjects == rhs.pObjects );
  23029. }
  23030. bool operator!=( DebugUtilsMessengerCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  23031. {
  23032. return !operator==( rhs );
  23033. }
  23034. #endif
  23035. public:
  23036. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCallbackDataEXT;
  23037. const void* pNext = {};
  23038. VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT flags = {};
  23039. const char* pMessageIdName = {};
  23040. int32_t messageIdNumber = {};
  23041. const char* pMessage = {};
  23042. uint32_t queueLabelCount = {};
  23043. const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pQueueLabels = {};
  23044. uint32_t cmdBufLabelCount = {};
  23045. const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pCmdBufLabels = {};
  23046. uint32_t objectCount = {};
  23047. const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pObjects = {};
  23048. };
  23049. static_assert( sizeof( DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ), "struct and wrapper have different size!" );
  23050. static_assert( std::is_standard_layout<DebugUtilsMessengerCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
  23051. template <>
  23052. struct CppType<StructureType, StructureType::eDebugUtilsMessengerCallbackDataEXT>
  23053. {
  23054. using Type = DebugUtilsMessengerCallbackDataEXT;
  23055. };
  23056. struct DebugUtilsMessengerCreateInfoEXT
  23057. {
  23058. static const bool allowDuplicate = true;
  23059. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
  23060. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23061. VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ = {}, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ = {}, PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ = {}, void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT
  23062. : flags( flags_ ), messageSeverity( messageSeverity_ ), messageType( messageType_ ), pfnUserCallback( pfnUserCallback_ ), pUserData( pUserData_ )
  23063. {}
  23064. VULKAN_HPP_CONSTEXPR DebugUtilsMessengerCreateInfoEXT( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23065. DebugUtilsMessengerCreateInfoEXT( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  23066. : DebugUtilsMessengerCreateInfoEXT( *reinterpret_cast<DebugUtilsMessengerCreateInfoEXT const *>( &rhs ) )
  23067. {}
  23068. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23069. VULKAN_HPP_CONSTEXPR_14 DebugUtilsMessengerCreateInfoEXT & operator=( DebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23070. DebugUtilsMessengerCreateInfoEXT & operator=( VkDebugUtilsMessengerCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  23071. {
  23072. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT const *>( &rhs );
  23073. return *this;
  23074. }
  23075. DebugUtilsMessengerCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  23076. {
  23077. pNext = pNext_;
  23078. return *this;
  23079. }
  23080. DebugUtilsMessengerCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
  23081. {
  23082. flags = flags_;
  23083. return *this;
  23084. }
  23085. DebugUtilsMessengerCreateInfoEXT & setMessageSeverity( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity_ ) VULKAN_HPP_NOEXCEPT
  23086. {
  23087. messageSeverity = messageSeverity_;
  23088. return *this;
  23089. }
  23090. DebugUtilsMessengerCreateInfoEXT & setMessageType( VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType_ ) VULKAN_HPP_NOEXCEPT
  23091. {
  23092. messageType = messageType_;
  23093. return *this;
  23094. }
  23095. DebugUtilsMessengerCreateInfoEXT & setPfnUserCallback( PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
  23096. {
  23097. pfnUserCallback = pfnUserCallback_;
  23098. return *this;
  23099. }
  23100. DebugUtilsMessengerCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
  23101. {
  23102. pUserData = pUserData_;
  23103. return *this;
  23104. }
  23105. operator VkDebugUtilsMessengerCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  23106. {
  23107. return *reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT*>( this );
  23108. }
  23109. operator VkDebugUtilsMessengerCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  23110. {
  23111. return *reinterpret_cast<VkDebugUtilsMessengerCreateInfoEXT*>( this );
  23112. }
  23113. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  23114. auto operator<=>( DebugUtilsMessengerCreateInfoEXT const& ) const = default;
  23115. #else
  23116. bool operator==( DebugUtilsMessengerCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  23117. {
  23118. return ( sType == rhs.sType )
  23119. && ( pNext == rhs.pNext )
  23120. && ( flags == rhs.flags )
  23121. && ( messageSeverity == rhs.messageSeverity )
  23122. && ( messageType == rhs.messageType )
  23123. && ( pfnUserCallback == rhs.pfnUserCallback )
  23124. && ( pUserData == rhs.pUserData );
  23125. }
  23126. bool operator!=( DebugUtilsMessengerCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  23127. {
  23128. return !operator==( rhs );
  23129. }
  23130. #endif
  23131. public:
  23132. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsMessengerCreateInfoEXT;
  23133. const void* pNext = {};
  23134. VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT flags = {};
  23135. VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT messageSeverity = {};
  23136. VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageType = {};
  23137. PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback = {};
  23138. void* pUserData = {};
  23139. };
  23140. static_assert( sizeof( DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ), "struct and wrapper have different size!" );
  23141. static_assert( std::is_standard_layout<DebugUtilsMessengerCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  23142. template <>
  23143. struct CppType<StructureType, StructureType::eDebugUtilsMessengerCreateInfoEXT>
  23144. {
  23145. using Type = DebugUtilsMessengerCreateInfoEXT;
  23146. };
  23147. struct DebugUtilsObjectTagInfoEXT
  23148. {
  23149. static const bool allowDuplicate = false;
  23150. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDebugUtilsObjectTagInfoEXT;
  23151. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23152. VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT(VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, uint64_t tagName_ = {}, size_t tagSize_ = {}, const void* pTag_ = {}) VULKAN_HPP_NOEXCEPT
  23153. : objectType( objectType_ ), objectHandle( objectHandle_ ), tagName( tagName_ ), tagSize( tagSize_ ), pTag( pTag_ )
  23154. {}
  23155. VULKAN_HPP_CONSTEXPR DebugUtilsObjectTagInfoEXT( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23156. DebugUtilsObjectTagInfoEXT( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  23157. : DebugUtilsObjectTagInfoEXT( *reinterpret_cast<DebugUtilsObjectTagInfoEXT const *>( &rhs ) )
  23158. {}
  23159. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  23160. template <typename T>
  23161. DebugUtilsObjectTagInfoEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType_, uint64_t objectHandle_, uint64_t tagName_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ )
  23162. : objectType( objectType_ ), objectHandle( objectHandle_ ), tagName( tagName_ ), tagSize( tag_.size() * sizeof(T) ), pTag( tag_.data() )
  23163. {}
  23164. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  23165. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23166. VULKAN_HPP_CONSTEXPR_14 DebugUtilsObjectTagInfoEXT & operator=( DebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23167. DebugUtilsObjectTagInfoEXT & operator=( VkDebugUtilsObjectTagInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  23168. {
  23169. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT const *>( &rhs );
  23170. return *this;
  23171. }
  23172. DebugUtilsObjectTagInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  23173. {
  23174. pNext = pNext_;
  23175. return *this;
  23176. }
  23177. DebugUtilsObjectTagInfoEXT & setObjectType( VULKAN_HPP_NAMESPACE::ObjectType objectType_ ) VULKAN_HPP_NOEXCEPT
  23178. {
  23179. objectType = objectType_;
  23180. return *this;
  23181. }
  23182. DebugUtilsObjectTagInfoEXT & setObjectHandle( uint64_t objectHandle_ ) VULKAN_HPP_NOEXCEPT
  23183. {
  23184. objectHandle = objectHandle_;
  23185. return *this;
  23186. }
  23187. DebugUtilsObjectTagInfoEXT & setTagName( uint64_t tagName_ ) VULKAN_HPP_NOEXCEPT
  23188. {
  23189. tagName = tagName_;
  23190. return *this;
  23191. }
  23192. DebugUtilsObjectTagInfoEXT & setTagSize( size_t tagSize_ ) VULKAN_HPP_NOEXCEPT
  23193. {
  23194. tagSize = tagSize_;
  23195. return *this;
  23196. }
  23197. DebugUtilsObjectTagInfoEXT & setPTag( const void* pTag_ ) VULKAN_HPP_NOEXCEPT
  23198. {
  23199. pTag = pTag_;
  23200. return *this;
  23201. }
  23202. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  23203. template <typename T>
  23204. DebugUtilsObjectTagInfoEXT & setTag( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & tag_ ) VULKAN_HPP_NOEXCEPT
  23205. {
  23206. tagSize = tag_.size() * sizeof(T);
  23207. pTag = tag_.data();
  23208. return *this;
  23209. }
  23210. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  23211. operator VkDebugUtilsObjectTagInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  23212. {
  23213. return *reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT*>( this );
  23214. }
  23215. operator VkDebugUtilsObjectTagInfoEXT &() VULKAN_HPP_NOEXCEPT
  23216. {
  23217. return *reinterpret_cast<VkDebugUtilsObjectTagInfoEXT*>( this );
  23218. }
  23219. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  23220. auto operator<=>( DebugUtilsObjectTagInfoEXT const& ) const = default;
  23221. #else
  23222. bool operator==( DebugUtilsObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  23223. {
  23224. return ( sType == rhs.sType )
  23225. && ( pNext == rhs.pNext )
  23226. && ( objectType == rhs.objectType )
  23227. && ( objectHandle == rhs.objectHandle )
  23228. && ( tagName == rhs.tagName )
  23229. && ( tagSize == rhs.tagSize )
  23230. && ( pTag == rhs.pTag );
  23231. }
  23232. bool operator!=( DebugUtilsObjectTagInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  23233. {
  23234. return !operator==( rhs );
  23235. }
  23236. #endif
  23237. public:
  23238. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDebugUtilsObjectTagInfoEXT;
  23239. const void* pNext = {};
  23240. VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
  23241. uint64_t objectHandle = {};
  23242. uint64_t tagName = {};
  23243. size_t tagSize = {};
  23244. const void* pTag = {};
  23245. };
  23246. static_assert( sizeof( DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ), "struct and wrapper have different size!" );
  23247. static_assert( std::is_standard_layout<DebugUtilsObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
  23248. template <>
  23249. struct CppType<StructureType, StructureType::eDebugUtilsObjectTagInfoEXT>
  23250. {
  23251. using Type = DebugUtilsObjectTagInfoEXT;
  23252. };
  23253. struct DedicatedAllocationBufferCreateInfoNV
  23254. {
  23255. static const bool allowDuplicate = false;
  23256. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
  23257. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23258. VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}) VULKAN_HPP_NOEXCEPT
  23259. : dedicatedAllocation( dedicatedAllocation_ )
  23260. {}
  23261. VULKAN_HPP_CONSTEXPR DedicatedAllocationBufferCreateInfoNV( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23262. DedicatedAllocationBufferCreateInfoNV( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  23263. : DedicatedAllocationBufferCreateInfoNV( *reinterpret_cast<DedicatedAllocationBufferCreateInfoNV const *>( &rhs ) )
  23264. {}
  23265. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23266. VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationBufferCreateInfoNV & operator=( DedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23267. DedicatedAllocationBufferCreateInfoNV & operator=( VkDedicatedAllocationBufferCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  23268. {
  23269. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV const *>( &rhs );
  23270. return *this;
  23271. }
  23272. DedicatedAllocationBufferCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  23273. {
  23274. pNext = pNext_;
  23275. return *this;
  23276. }
  23277. DedicatedAllocationBufferCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
  23278. {
  23279. dedicatedAllocation = dedicatedAllocation_;
  23280. return *this;
  23281. }
  23282. operator VkDedicatedAllocationBufferCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
  23283. {
  23284. return *reinterpret_cast<const VkDedicatedAllocationBufferCreateInfoNV*>( this );
  23285. }
  23286. operator VkDedicatedAllocationBufferCreateInfoNV &() VULKAN_HPP_NOEXCEPT
  23287. {
  23288. return *reinterpret_cast<VkDedicatedAllocationBufferCreateInfoNV*>( this );
  23289. }
  23290. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  23291. auto operator<=>( DedicatedAllocationBufferCreateInfoNV const& ) const = default;
  23292. #else
  23293. bool operator==( DedicatedAllocationBufferCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  23294. {
  23295. return ( sType == rhs.sType )
  23296. && ( pNext == rhs.pNext )
  23297. && ( dedicatedAllocation == rhs.dedicatedAllocation );
  23298. }
  23299. bool operator!=( DedicatedAllocationBufferCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  23300. {
  23301. return !operator==( rhs );
  23302. }
  23303. #endif
  23304. public:
  23305. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationBufferCreateInfoNV;
  23306. const void* pNext = {};
  23307. VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
  23308. };
  23309. static_assert( sizeof( DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ), "struct and wrapper have different size!" );
  23310. static_assert( std::is_standard_layout<DedicatedAllocationBufferCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  23311. template <>
  23312. struct CppType<StructureType, StructureType::eDedicatedAllocationBufferCreateInfoNV>
  23313. {
  23314. using Type = DedicatedAllocationBufferCreateInfoNV;
  23315. };
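// Illustrative sketch: requesting an NV dedicated allocation for a buffer by chaining this struct
// into vk::BufferCreateInfo::pNext.  Assumes the VK_NV_dedicated_allocation device extension is
// enabled and that `device` and `size` are provided by the application (default configuration
// with exceptions enabled).
//
//   vk::DedicatedAllocationBufferCreateInfoNV dedicatedBufferInfo( VK_TRUE );
//   vk::BufferCreateInfo bufferInfo( {}, size, vk::BufferUsageFlagBits::eStorageBuffer );
//   bufferInfo.pNext = &dedicatedBufferInfo;
//   vk::Buffer buffer = device.createBuffer( bufferInfo );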
  23316. struct DedicatedAllocationImageCreateInfoNV
  23317. {
  23318. static const bool allowDuplicate = false;
  23319. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationImageCreateInfoNV;
  23320. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23321. VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ = {}) VULKAN_HPP_NOEXCEPT
  23322. : dedicatedAllocation( dedicatedAllocation_ )
  23323. {}
  23324. VULKAN_HPP_CONSTEXPR DedicatedAllocationImageCreateInfoNV( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23325. DedicatedAllocationImageCreateInfoNV( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  23326. : DedicatedAllocationImageCreateInfoNV( *reinterpret_cast<DedicatedAllocationImageCreateInfoNV const *>( &rhs ) )
  23327. {}
  23328. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23329. VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationImageCreateInfoNV & operator=( DedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23330. DedicatedAllocationImageCreateInfoNV & operator=( VkDedicatedAllocationImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  23331. {
  23332. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV const *>( &rhs );
  23333. return *this;
  23334. }
  23335. DedicatedAllocationImageCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  23336. {
  23337. pNext = pNext_;
  23338. return *this;
  23339. }
  23340. DedicatedAllocationImageCreateInfoNV & setDedicatedAllocation( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation_ ) VULKAN_HPP_NOEXCEPT
  23341. {
  23342. dedicatedAllocation = dedicatedAllocation_;
  23343. return *this;
  23344. }
  23345. operator VkDedicatedAllocationImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
  23346. {
  23347. return *reinterpret_cast<const VkDedicatedAllocationImageCreateInfoNV*>( this );
  23348. }
  23349. operator VkDedicatedAllocationImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
  23350. {
  23351. return *reinterpret_cast<VkDedicatedAllocationImageCreateInfoNV*>( this );
  23352. }
  23353. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  23354. auto operator<=>( DedicatedAllocationImageCreateInfoNV const& ) const = default;
  23355. #else
  23356. bool operator==( DedicatedAllocationImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  23357. {
  23358. return ( sType == rhs.sType )
  23359. && ( pNext == rhs.pNext )
  23360. && ( dedicatedAllocation == rhs.dedicatedAllocation );
  23361. }
  23362. bool operator!=( DedicatedAllocationImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  23363. {
  23364. return !operator==( rhs );
  23365. }
  23366. #endif
  23367. public:
  23368. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationImageCreateInfoNV;
  23369. const void* pNext = {};
  23370. VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocation = {};
  23371. };
  23372. static_assert( sizeof( DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ), "struct and wrapper have different size!" );
  23373. static_assert( std::is_standard_layout<DedicatedAllocationImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  23374. template <>
  23375. struct CppType<StructureType, StructureType::eDedicatedAllocationImageCreateInfoNV>
  23376. {
  23377. using Type = DedicatedAllocationImageCreateInfoNV;
  23378. };
  23379. struct DedicatedAllocationMemoryAllocateInfoNV
  23380. {
  23381. static const bool allowDuplicate = false;
  23382. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
  23383. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23384. VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT
  23385. : image( image_ ), buffer( buffer_ )
  23386. {}
  23387. VULKAN_HPP_CONSTEXPR DedicatedAllocationMemoryAllocateInfoNV( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23388. DedicatedAllocationMemoryAllocateInfoNV( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  23389. : DedicatedAllocationMemoryAllocateInfoNV( *reinterpret_cast<DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs ) )
  23390. {}
  23391. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23392. VULKAN_HPP_CONSTEXPR_14 DedicatedAllocationMemoryAllocateInfoNV & operator=( DedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23393. DedicatedAllocationMemoryAllocateInfoNV & operator=( VkDedicatedAllocationMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  23394. {
  23395. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV const *>( &rhs );
  23396. return *this;
  23397. }
  23398. DedicatedAllocationMemoryAllocateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  23399. {
  23400. pNext = pNext_;
  23401. return *this;
  23402. }
  23403. DedicatedAllocationMemoryAllocateInfoNV & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
  23404. {
  23405. image = image_;
  23406. return *this;
  23407. }
  23408. DedicatedAllocationMemoryAllocateInfoNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
  23409. {
  23410. buffer = buffer_;
  23411. return *this;
  23412. }
  23413. operator VkDedicatedAllocationMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT
  23414. {
  23415. return *reinterpret_cast<const VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
  23416. }
  23417. operator VkDedicatedAllocationMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
  23418. {
  23419. return *reinterpret_cast<VkDedicatedAllocationMemoryAllocateInfoNV*>( this );
  23420. }
  23421. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  23422. auto operator<=>( DedicatedAllocationMemoryAllocateInfoNV const& ) const = default;
  23423. #else
  23424. bool operator==( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  23425. {
  23426. return ( sType == rhs.sType )
  23427. && ( pNext == rhs.pNext )
  23428. && ( image == rhs.image )
  23429. && ( buffer == rhs.buffer );
  23430. }
  23431. bool operator!=( DedicatedAllocationMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  23432. {
  23433. return !operator==( rhs );
  23434. }
  23435. #endif
  23436. public:
  23437. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDedicatedAllocationMemoryAllocateInfoNV;
  23438. const void* pNext = {};
  23439. VULKAN_HPP_NAMESPACE::Image image = {};
  23440. VULKAN_HPP_NAMESPACE::Buffer buffer = {};
  23441. };
  23442. static_assert( sizeof( DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
  23443. static_assert( std::is_standard_layout<DedicatedAllocationMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
  23444. template <>
  23445. struct CppType<StructureType, StructureType::eDedicatedAllocationMemoryAllocateInfoNV>
  23446. {
  23447. using Type = DedicatedAllocationMemoryAllocateInfoNV;
  23448. };
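// Illustrative sketch: backing a dedicated buffer (as in the sketch above) with its own memory
// allocation.  Exactly one of the image / buffer members may be set, so the image member is left
// null here; `memoryTypeIndex` selection is application-specific and assumed to be done elsewhere.
//
//   vk::MemoryRequirements requirements = device.getBufferMemoryRequirements( buffer );
//   vk::DedicatedAllocationMemoryAllocateInfoNV dedicatedAlloc( {}, buffer );
//   vk::MemoryAllocateInfo allocInfo( requirements.size, memoryTypeIndex );
//   allocInfo.pNext = &dedicatedAlloc;
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );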
  23449. struct DescriptorBufferInfo
  23450. {
  23451. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23452. VULKAN_HPP_CONSTEXPR DescriptorBufferInfo(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize range_ = {}) VULKAN_HPP_NOEXCEPT
  23453. : buffer( buffer_ ), offset( offset_ ), range( range_ )
  23454. {}
  23455. VULKAN_HPP_CONSTEXPR DescriptorBufferInfo( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23456. DescriptorBufferInfo( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  23457. : DescriptorBufferInfo( *reinterpret_cast<DescriptorBufferInfo const *>( &rhs ) )
  23458. {}
  23459. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23460. VULKAN_HPP_CONSTEXPR_14 DescriptorBufferInfo & operator=( DescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23461. DescriptorBufferInfo & operator=( VkDescriptorBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  23462. {
  23463. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo const *>( &rhs );
  23464. return *this;
  23465. }
  23466. DescriptorBufferInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
  23467. {
  23468. buffer = buffer_;
  23469. return *this;
  23470. }
  23471. DescriptorBufferInfo & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
  23472. {
  23473. offset = offset_;
  23474. return *this;
  23475. }
  23476. DescriptorBufferInfo & setRange( VULKAN_HPP_NAMESPACE::DeviceSize range_ ) VULKAN_HPP_NOEXCEPT
  23477. {
  23478. range = range_;
  23479. return *this;
  23480. }
  23481. operator VkDescriptorBufferInfo const&() const VULKAN_HPP_NOEXCEPT
  23482. {
  23483. return *reinterpret_cast<const VkDescriptorBufferInfo*>( this );
  23484. }
  23485. operator VkDescriptorBufferInfo &() VULKAN_HPP_NOEXCEPT
  23486. {
  23487. return *reinterpret_cast<VkDescriptorBufferInfo*>( this );
  23488. }
  23489. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  23490. auto operator<=>( DescriptorBufferInfo const& ) const = default;
  23491. #else
  23492. bool operator==( DescriptorBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  23493. {
  23494. return ( buffer == rhs.buffer )
  23495. && ( offset == rhs.offset )
  23496. && ( range == rhs.range );
  23497. }
  23498. bool operator!=( DescriptorBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  23499. {
  23500. return !operator==( rhs );
  23501. }
  23502. #endif
  23503. public:
  23504. VULKAN_HPP_NAMESPACE::Buffer buffer = {};
  23505. VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
  23506. VULKAN_HPP_NAMESPACE::DeviceSize range = {};
  23507. };
  23508. static_assert( sizeof( DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
  23509. static_assert( std::is_standard_layout<DescriptorBufferInfo>::value, "struct wrapper is not a standard layout!" );
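// Illustrative sketch: describing the whole of an application-owned uniform buffer for use in a
// descriptor write.  `uniformBuffer` is an assumption; VK_WHOLE_SIZE comes from the C headers.
//
//   vk::DescriptorBufferInfo bufferInfo( uniformBuffer, /*offset*/ 0, VK_WHOLE_SIZE );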
  23510. class Sampler
  23511. {
  23512. public:
  23513. using CType = VkSampler;
  23514. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSampler;
  23515. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler;
  23516. public:
  23517. VULKAN_HPP_CONSTEXPR Sampler() VULKAN_HPP_NOEXCEPT
  23518. : m_sampler(VK_NULL_HANDLE)
  23519. {}
  23520. VULKAN_HPP_CONSTEXPR Sampler( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  23521. : m_sampler(VK_NULL_HANDLE)
  23522. {}
  23523. VULKAN_HPP_TYPESAFE_EXPLICIT Sampler( VkSampler sampler ) VULKAN_HPP_NOEXCEPT
  23524. : m_sampler( sampler )
  23525. {}
  23526. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  23527. Sampler & operator=(VkSampler sampler) VULKAN_HPP_NOEXCEPT
  23528. {
  23529. m_sampler = sampler;
  23530. return *this;
  23531. }
  23532. #endif
  23533. Sampler & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  23534. {
  23535. m_sampler = VK_NULL_HANDLE;
  23536. return *this;
  23537. }
  23538. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  23539. auto operator<=>( Sampler const& ) const = default;
  23540. #else
  23541. bool operator==( Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
  23542. {
  23543. return m_sampler == rhs.m_sampler;
  23544. }
  23545. bool operator!=(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
  23546. {
  23547. return m_sampler != rhs.m_sampler;
  23548. }
  23549. bool operator<(Sampler const & rhs ) const VULKAN_HPP_NOEXCEPT
  23550. {
  23551. return m_sampler < rhs.m_sampler;
  23552. }
  23553. #endif
  23554. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSampler() const VULKAN_HPP_NOEXCEPT
  23555. {
  23556. return m_sampler;
  23557. }
  23558. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  23559. {
  23560. return m_sampler != VK_NULL_HANDLE;
  23561. }
  23562. bool operator!() const VULKAN_HPP_NOEXCEPT
  23563. {
  23564. return m_sampler == VK_NULL_HANDLE;
  23565. }
  23566. private:
  23567. VkSampler m_sampler;
  23568. };
  23569. static_assert( sizeof( VULKAN_HPP_NAMESPACE::Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );
  23570. template <>
  23571. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eSampler>
  23572. {
  23573. using type = VULKAN_HPP_NAMESPACE::Sampler;
  23574. };
  23575. template <>
  23576. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSampler>
  23577. {
  23578. using Type = VULKAN_HPP_NAMESPACE::Sampler;
  23579. };
  23580. template <>
  23581. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSampler>
  23582. {
  23583. using Type = VULKAN_HPP_NAMESPACE::Sampler;
  23584. };
  23585. template <>
  23586. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Sampler>
  23587. {
  23588. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  23589. };
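// Illustrative sketch: handle wrappers such as vk::Sampler are thin, value-semantic wrappers around
// the corresponding C handle.  A default-constructed wrapper holds VK_NULL_HANDLE and converts to
// false in a boolean context.  `samplerCreateInfo` is an application-filled vk::SamplerCreateInfo.
//
//   vk::Sampler sampler;                                   // holds VK_NULL_HANDLE
//   if ( !sampler ) { /* nothing created yet */ }
//   sampler = device.createSampler( samplerCreateInfo );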
  23590. class ImageView
  23591. {
  23592. public:
  23593. using CType = VkImageView;
  23594. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eImageView;
  23595. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView;
  23596. public:
  23597. VULKAN_HPP_CONSTEXPR ImageView() VULKAN_HPP_NOEXCEPT
  23598. : m_imageView(VK_NULL_HANDLE)
  23599. {}
  23600. VULKAN_HPP_CONSTEXPR ImageView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  23601. : m_imageView(VK_NULL_HANDLE)
  23602. {}
  23603. VULKAN_HPP_TYPESAFE_EXPLICIT ImageView( VkImageView imageView ) VULKAN_HPP_NOEXCEPT
  23604. : m_imageView( imageView )
  23605. {}
  23606. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  23607. ImageView & operator=(VkImageView imageView) VULKAN_HPP_NOEXCEPT
  23608. {
  23609. m_imageView = imageView;
  23610. return *this;
  23611. }
  23612. #endif
  23613. ImageView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  23614. {
  23615. m_imageView = VK_NULL_HANDLE;
  23616. return *this;
  23617. }
  23618. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  23619. auto operator<=>( ImageView const& ) const = default;
  23620. #else
  23621. bool operator==( ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
  23622. {
  23623. return m_imageView == rhs.m_imageView;
  23624. }
  23625. bool operator!=(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
  23626. {
  23627. return m_imageView != rhs.m_imageView;
  23628. }
  23629. bool operator<(ImageView const & rhs ) const VULKAN_HPP_NOEXCEPT
  23630. {
  23631. return m_imageView < rhs.m_imageView;
  23632. }
  23633. #endif
  23634. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkImageView() const VULKAN_HPP_NOEXCEPT
  23635. {
  23636. return m_imageView;
  23637. }
  23638. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  23639. {
  23640. return m_imageView != VK_NULL_HANDLE;
  23641. }
  23642. bool operator!() const VULKAN_HPP_NOEXCEPT
  23643. {
  23644. return m_imageView == VK_NULL_HANDLE;
  23645. }
  23646. private:
  23647. VkImageView m_imageView;
  23648. };
  23649. static_assert( sizeof( VULKAN_HPP_NAMESPACE::ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" );
  23650. template <>
  23651. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eImageView>
  23652. {
  23653. using type = VULKAN_HPP_NAMESPACE::ImageView;
  23654. };
  23655. template <>
  23656. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eImageView>
  23657. {
  23658. using Type = VULKAN_HPP_NAMESPACE::ImageView;
  23659. };
  23660. template <>
  23661. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eImageView>
  23662. {
  23663. using Type = VULKAN_HPP_NAMESPACE::ImageView;
  23664. };
  23665. template <>
  23666. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ImageView>
  23667. {
  23668. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  23669. };
  23670. struct DescriptorImageInfo
  23671. {
  23672. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23673. VULKAN_HPP_CONSTEXPR DescriptorImageInfo(VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}, VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
  23674. : sampler( sampler_ ), imageView( imageView_ ), imageLayout( imageLayout_ )
  23675. {}
  23676. VULKAN_HPP_CONSTEXPR DescriptorImageInfo( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23677. DescriptorImageInfo( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  23678. : DescriptorImageInfo( *reinterpret_cast<DescriptorImageInfo const *>( &rhs ) )
  23679. {}
  23680. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23681. VULKAN_HPP_CONSTEXPR_14 DescriptorImageInfo & operator=( DescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23682. DescriptorImageInfo & operator=( VkDescriptorImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  23683. {
  23684. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorImageInfo const *>( &rhs );
  23685. return *this;
  23686. }
  23687. DescriptorImageInfo & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
  23688. {
  23689. sampler = sampler_;
  23690. return *this;
  23691. }
  23692. DescriptorImageInfo & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
  23693. {
  23694. imageView = imageView_;
  23695. return *this;
  23696. }
  23697. DescriptorImageInfo & setImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout imageLayout_ ) VULKAN_HPP_NOEXCEPT
  23698. {
  23699. imageLayout = imageLayout_;
  23700. return *this;
  23701. }
  23702. operator VkDescriptorImageInfo const&() const VULKAN_HPP_NOEXCEPT
  23703. {
  23704. return *reinterpret_cast<const VkDescriptorImageInfo*>( this );
  23705. }
  23706. operator VkDescriptorImageInfo &() VULKAN_HPP_NOEXCEPT
  23707. {
  23708. return *reinterpret_cast<VkDescriptorImageInfo*>( this );
  23709. }
  23710. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  23711. auto operator<=>( DescriptorImageInfo const& ) const = default;
  23712. #else
  23713. bool operator==( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  23714. {
  23715. return ( sampler == rhs.sampler )
  23716. && ( imageView == rhs.imageView )
  23717. && ( imageLayout == rhs.imageLayout );
  23718. }
  23719. bool operator!=( DescriptorImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  23720. {
  23721. return !operator==( rhs );
  23722. }
  23723. #endif
  23724. public:
  23725. VULKAN_HPP_NAMESPACE::Sampler sampler = {};
  23726. VULKAN_HPP_NAMESPACE::ImageView imageView = {};
  23727. VULKAN_HPP_NAMESPACE::ImageLayout imageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  23728. };
  23729. static_assert( sizeof( DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
  23730. static_assert( std::is_standard_layout<DescriptorImageInfo>::value, "struct wrapper is not a standard layout!" );
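// Illustrative sketch: a combined-image-sampler descriptor referencing application-owned `sampler`
// and `textureView` handles, with the image expected in the shader-read-only layout at sample time.
//
//   vk::DescriptorImageInfo imageInfo( sampler, textureView, vk::ImageLayout::eShaderReadOnlyOptimal );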
  23731. struct DescriptorPoolSize
  23732. {
  23733. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23734. VULKAN_HPP_CONSTEXPR DescriptorPoolSize(VULKAN_HPP_NAMESPACE::DescriptorType type_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
  23735. : type( type_ ), descriptorCount( descriptorCount_ )
  23736. {}
  23737. VULKAN_HPP_CONSTEXPR DescriptorPoolSize( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23738. DescriptorPoolSize( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
  23739. : DescriptorPoolSize( *reinterpret_cast<DescriptorPoolSize const *>( &rhs ) )
  23740. {}
  23741. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23742. VULKAN_HPP_CONSTEXPR_14 DescriptorPoolSize & operator=( DescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23743. DescriptorPoolSize & operator=( VkDescriptorPoolSize const & rhs ) VULKAN_HPP_NOEXCEPT
  23744. {
  23745. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolSize const *>( &rhs );
  23746. return *this;
  23747. }
  23748. DescriptorPoolSize & setType( VULKAN_HPP_NAMESPACE::DescriptorType type_ ) VULKAN_HPP_NOEXCEPT
  23749. {
  23750. type = type_;
  23751. return *this;
  23752. }
  23753. DescriptorPoolSize & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
  23754. {
  23755. descriptorCount = descriptorCount_;
  23756. return *this;
  23757. }
  23758. operator VkDescriptorPoolSize const&() const VULKAN_HPP_NOEXCEPT
  23759. {
  23760. return *reinterpret_cast<const VkDescriptorPoolSize*>( this );
  23761. }
  23762. operator VkDescriptorPoolSize &() VULKAN_HPP_NOEXCEPT
  23763. {
  23764. return *reinterpret_cast<VkDescriptorPoolSize*>( this );
  23765. }
  23766. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  23767. auto operator<=>( DescriptorPoolSize const& ) const = default;
  23768. #else
  23769. bool operator==( DescriptorPoolSize const& rhs ) const VULKAN_HPP_NOEXCEPT
  23770. {
  23771. return ( type == rhs.type )
  23772. && ( descriptorCount == rhs.descriptorCount );
  23773. }
  23774. bool operator!=( DescriptorPoolSize const& rhs ) const VULKAN_HPP_NOEXCEPT
  23775. {
  23776. return !operator==( rhs );
  23777. }
  23778. #endif
  23779. public:
  23780. VULKAN_HPP_NAMESPACE::DescriptorType type = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
  23781. uint32_t descriptorCount = {};
  23782. };
  23783. static_assert( sizeof( DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
  23784. static_assert( std::is_standard_layout<DescriptorPoolSize>::value, "struct wrapper is not a standard layout!" );
  23785. struct DescriptorPoolCreateInfo
  23786. {
  23787. static const bool allowDuplicate = false;
  23788. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolCreateInfo;
  23789. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23790. VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ = {}, uint32_t maxSets_ = {}, uint32_t poolSizeCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ = {}) VULKAN_HPP_NOEXCEPT
  23791. : flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( poolSizeCount_ ), pPoolSizes( pPoolSizes_ )
  23792. {}
  23793. VULKAN_HPP_CONSTEXPR DescriptorPoolCreateInfo( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23794. DescriptorPoolCreateInfo( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  23795. : DescriptorPoolCreateInfo( *reinterpret_cast<DescriptorPoolCreateInfo const *>( &rhs ) )
  23796. {}
  23797. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  23798. DescriptorPoolCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_, uint32_t maxSets_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ )
  23799. : flags( flags_ ), maxSets( maxSets_ ), poolSizeCount( static_cast<uint32_t>( poolSizes_.size() ) ), pPoolSizes( poolSizes_.data() )
  23800. {}
  23801. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  23802. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23803. VULKAN_HPP_CONSTEXPR_14 DescriptorPoolCreateInfo & operator=( DescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23804. DescriptorPoolCreateInfo & operator=( VkDescriptorPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  23805. {
  23806. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo const *>( &rhs );
  23807. return *this;
  23808. }
  23809. DescriptorPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  23810. {
  23811. pNext = pNext_;
  23812. return *this;
  23813. }
  23814. DescriptorPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  23815. {
  23816. flags = flags_;
  23817. return *this;
  23818. }
  23819. DescriptorPoolCreateInfo & setMaxSets( uint32_t maxSets_ ) VULKAN_HPP_NOEXCEPT
  23820. {
  23821. maxSets = maxSets_;
  23822. return *this;
  23823. }
  23824. DescriptorPoolCreateInfo & setPoolSizeCount( uint32_t poolSizeCount_ ) VULKAN_HPP_NOEXCEPT
  23825. {
  23826. poolSizeCount = poolSizeCount_;
  23827. return *this;
  23828. }
  23829. DescriptorPoolCreateInfo & setPPoolSizes( const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes_ ) VULKAN_HPP_NOEXCEPT
  23830. {
  23831. pPoolSizes = pPoolSizes_;
  23832. return *this;
  23833. }
  23834. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  23835. DescriptorPoolCreateInfo & setPoolSizes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorPoolSize> const & poolSizes_ ) VULKAN_HPP_NOEXCEPT
  23836. {
  23837. poolSizeCount = static_cast<uint32_t>( poolSizes_.size() );
  23838. pPoolSizes = poolSizes_.data();
  23839. return *this;
  23840. }
  23841. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  23842. operator VkDescriptorPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  23843. {
  23844. return *reinterpret_cast<const VkDescriptorPoolCreateInfo*>( this );
  23845. }
  23846. operator VkDescriptorPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
  23847. {
  23848. return *reinterpret_cast<VkDescriptorPoolCreateInfo*>( this );
  23849. }
  23850. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  23851. auto operator<=>( DescriptorPoolCreateInfo const& ) const = default;
  23852. #else
  23853. bool operator==( DescriptorPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  23854. {
  23855. return ( sType == rhs.sType )
  23856. && ( pNext == rhs.pNext )
  23857. && ( flags == rhs.flags )
  23858. && ( maxSets == rhs.maxSets )
  23859. && ( poolSizeCount == rhs.poolSizeCount )
  23860. && ( pPoolSizes == rhs.pPoolSizes );
  23861. }
  23862. bool operator!=( DescriptorPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  23863. {
  23864. return !operator==( rhs );
  23865. }
  23866. #endif
  23867. public:
  23868. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolCreateInfo;
  23869. const void* pNext = {};
  23870. VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags flags = {};
  23871. uint32_t maxSets = {};
  23872. uint32_t poolSizeCount = {};
  23873. const VULKAN_HPP_NAMESPACE::DescriptorPoolSize* pPoolSizes = {};
  23874. };
  23875. static_assert( sizeof( DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ), "struct and wrapper have different size!" );
  23876. static_assert( std::is_standard_layout<DescriptorPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
  23877. template <>
  23878. struct CppType<StructureType, StructureType::eDescriptorPoolCreateInfo>
  23879. {
  23880. using Type = DescriptorPoolCreateInfo;
  23881. };
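// Illustrative sketch: creating a small descriptor pool.  The ArrayProxy constructor above derives
// poolSizeCount from the container, so the count cannot drift out of sync with the data pointer.
// Assumes a vk::Device `device` and the default configuration with exceptions enabled.
//
//   std::array<vk::DescriptorPoolSize, 2> poolSizes = {
//     vk::DescriptorPoolSize( vk::DescriptorType::eUniformBuffer, 16 ),
//     vk::DescriptorPoolSize( vk::DescriptorType::eCombinedImageSampler, 16 ) };
//   vk::DescriptorPoolCreateInfo poolInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet,
//                                          /*maxSets*/ 32, poolSizes );
//   vk::DescriptorPool descriptorPool = device.createDescriptorPool( poolInfo );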
  23882. struct DescriptorPoolInlineUniformBlockCreateInfoEXT
  23883. {
  23884. static const bool allowDuplicate = false;
  23885. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT;
  23886. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23887. VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT(uint32_t maxInlineUniformBlockBindings_ = {}) VULKAN_HPP_NOEXCEPT
  23888. : maxInlineUniformBlockBindings( maxInlineUniformBlockBindings_ )
  23889. {}
  23890. VULKAN_HPP_CONSTEXPR DescriptorPoolInlineUniformBlockCreateInfoEXT( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23891. DescriptorPoolInlineUniformBlockCreateInfoEXT( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  23892. : DescriptorPoolInlineUniformBlockCreateInfoEXT( *reinterpret_cast<DescriptorPoolInlineUniformBlockCreateInfoEXT const *>( &rhs ) )
  23893. {}
  23894. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  23895. VULKAN_HPP_CONSTEXPR_14 DescriptorPoolInlineUniformBlockCreateInfoEXT & operator=( DescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  23896. DescriptorPoolInlineUniformBlockCreateInfoEXT & operator=( VkDescriptorPoolInlineUniformBlockCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  23897. {
  23898. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT const *>( &rhs );
  23899. return *this;
  23900. }
  23901. DescriptorPoolInlineUniformBlockCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  23902. {
  23903. pNext = pNext_;
  23904. return *this;
  23905. }
  23906. DescriptorPoolInlineUniformBlockCreateInfoEXT & setMaxInlineUniformBlockBindings( uint32_t maxInlineUniformBlockBindings_ ) VULKAN_HPP_NOEXCEPT
  23907. {
  23908. maxInlineUniformBlockBindings = maxInlineUniformBlockBindings_;
  23909. return *this;
  23910. }
  23911. operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  23912. {
  23913. return *reinterpret_cast<const VkDescriptorPoolInlineUniformBlockCreateInfoEXT*>( this );
  23914. }
  23915. operator VkDescriptorPoolInlineUniformBlockCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  23916. {
  23917. return *reinterpret_cast<VkDescriptorPoolInlineUniformBlockCreateInfoEXT*>( this );
  23918. }
  23919. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  23920. auto operator<=>( DescriptorPoolInlineUniformBlockCreateInfoEXT const& ) const = default;
  23921. #else
  23922. bool operator==( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  23923. {
  23924. return ( sType == rhs.sType )
  23925. && ( pNext == rhs.pNext )
  23926. && ( maxInlineUniformBlockBindings == rhs.maxInlineUniformBlockBindings );
  23927. }
  23928. bool operator!=( DescriptorPoolInlineUniformBlockCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  23929. {
  23930. return !operator==( rhs );
  23931. }
  23932. #endif
  23933. public:
  23934. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT;
  23935. const void* pNext = {};
  23936. uint32_t maxInlineUniformBlockBindings = {};
  23937. };
  23938. static_assert( sizeof( DescriptorPoolInlineUniformBlockCreateInfoEXT ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfoEXT ), "struct and wrapper have different size!" );
  23939. static_assert( std::is_standard_layout<DescriptorPoolInlineUniformBlockCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  23940. template <>
  23941. struct CppType<StructureType, StructureType::eDescriptorPoolInlineUniformBlockCreateInfoEXT>
  23942. {
  23943. using Type = DescriptorPoolInlineUniformBlockCreateInfoEXT;
  23944. };
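// Illustrative sketch: when a pool also serves inline uniform block descriptors
// (VK_EXT_inline_uniform_block, assumed enabled), the number of such bindings is declared by
// chaining this struct into the pool create info before createDescriptorPool is called.
//
//   vk::DescriptorPoolInlineUniformBlockCreateInfoEXT inlineBlockInfo( /*maxInlineUniformBlockBindings*/ 4 );
//   poolInfo.pNext = &inlineBlockInfo;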
  23945. class DescriptorPool
  23946. {
  23947. public:
  23948. using CType = VkDescriptorPool;
  23949. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool;
  23950. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool;
  23951. public:
  23952. VULKAN_HPP_CONSTEXPR DescriptorPool() VULKAN_HPP_NOEXCEPT
  23953. : m_descriptorPool(VK_NULL_HANDLE)
  23954. {}
  23955. VULKAN_HPP_CONSTEXPR DescriptorPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  23956. : m_descriptorPool(VK_NULL_HANDLE)
  23957. {}
  23958. VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorPool( VkDescriptorPool descriptorPool ) VULKAN_HPP_NOEXCEPT
  23959. : m_descriptorPool( descriptorPool )
  23960. {}
  23961. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  23962. DescriptorPool & operator=(VkDescriptorPool descriptorPool) VULKAN_HPP_NOEXCEPT
  23963. {
  23964. m_descriptorPool = descriptorPool;
  23965. return *this;
  23966. }
  23967. #endif
  23968. DescriptorPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  23969. {
  23970. m_descriptorPool = VK_NULL_HANDLE;
  23971. return *this;
  23972. }
  23973. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  23974. auto operator<=>( DescriptorPool const& ) const = default;
  23975. #else
  23976. bool operator==( DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
  23977. {
  23978. return m_descriptorPool == rhs.m_descriptorPool;
  23979. }
  23980. bool operator!=(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
  23981. {
  23982. return m_descriptorPool != rhs.m_descriptorPool;
  23983. }
  23984. bool operator<(DescriptorPool const & rhs ) const VULKAN_HPP_NOEXCEPT
  23985. {
  23986. return m_descriptorPool < rhs.m_descriptorPool;
  23987. }
  23988. #endif
  23989. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorPool() const VULKAN_HPP_NOEXCEPT
  23990. {
  23991. return m_descriptorPool;
  23992. }
  23993. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  23994. {
  23995. return m_descriptorPool != VK_NULL_HANDLE;
  23996. }
  23997. bool operator!() const VULKAN_HPP_NOEXCEPT
  23998. {
  23999. return m_descriptorPool == VK_NULL_HANDLE;
  24000. }
  24001. private:
  24002. VkDescriptorPool m_descriptorPool;
  24003. };
  24004. static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" );
  24005. template <>
  24006. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorPool>
  24007. {
  24008. using type = VULKAN_HPP_NAMESPACE::DescriptorPool;
  24009. };
  24010. template <>
  24011. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorPool>
  24012. {
  24013. using Type = VULKAN_HPP_NAMESPACE::DescriptorPool;
  24014. };
  24015. template <>
  24016. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorPool>
  24017. {
  24018. using Type = VULKAN_HPP_NAMESPACE::DescriptorPool;
  24019. };
  24020. template <>
  24021. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorPool>
  24022. {
  24023. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  24024. };
  24025. class DescriptorSetLayout
  24026. {
  24027. public:
  24028. using CType = VkDescriptorSetLayout;
  24029. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout;
  24030. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout;
  24031. public:
  24032. VULKAN_HPP_CONSTEXPR DescriptorSetLayout() VULKAN_HPP_NOEXCEPT
  24033. : m_descriptorSetLayout(VK_NULL_HANDLE)
  24034. {}
  24035. VULKAN_HPP_CONSTEXPR DescriptorSetLayout( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  24036. : m_descriptorSetLayout(VK_NULL_HANDLE)
  24037. {}
  24038. VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorSetLayout( VkDescriptorSetLayout descriptorSetLayout ) VULKAN_HPP_NOEXCEPT
  24039. : m_descriptorSetLayout( descriptorSetLayout )
  24040. {}
  24041. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  24042. DescriptorSetLayout & operator=(VkDescriptorSetLayout descriptorSetLayout) VULKAN_HPP_NOEXCEPT
  24043. {
  24044. m_descriptorSetLayout = descriptorSetLayout;
  24045. return *this;
  24046. }
  24047. #endif
  24048. DescriptorSetLayout & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  24049. {
  24050. m_descriptorSetLayout = VK_NULL_HANDLE;
  24051. return *this;
  24052. }
  24053. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  24054. auto operator<=>( DescriptorSetLayout const& ) const = default;
  24055. #else
  24056. bool operator==( DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
  24057. {
  24058. return m_descriptorSetLayout == rhs.m_descriptorSetLayout;
  24059. }
  24060. bool operator!=(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
  24061. {
  24062. return m_descriptorSetLayout != rhs.m_descriptorSetLayout;
  24063. }
  24064. bool operator<(DescriptorSetLayout const & rhs ) const VULKAN_HPP_NOEXCEPT
  24065. {
  24066. return m_descriptorSetLayout < rhs.m_descriptorSetLayout;
  24067. }
  24068. #endif
  24069. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorSetLayout() const VULKAN_HPP_NOEXCEPT
  24070. {
  24071. return m_descriptorSetLayout;
  24072. }
  24073. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  24074. {
  24075. return m_descriptorSetLayout != VK_NULL_HANDLE;
  24076. }
  24077. bool operator!() const VULKAN_HPP_NOEXCEPT
  24078. {
  24079. return m_descriptorSetLayout == VK_NULL_HANDLE;
  24080. }
  24081. private:
  24082. VkDescriptorSetLayout m_descriptorSetLayout;
  24083. };
  24084. static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" );
  24085. template <>
  24086. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorSetLayout>
  24087. {
  24088. using type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
  24089. };
  24090. template <>
  24091. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorSetLayout>
  24092. {
  24093. using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
  24094. };
  24095. template <>
  24096. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorSetLayout>
  24097. {
  24098. using Type = VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
  24099. };
  24100. template <>
  24101. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>
  24102. {
  24103. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  24104. };
  24105. struct DescriptorSetAllocateInfo
  24106. {
  24107. static const bool allowDuplicate = false;
  24108. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetAllocateInfo;
  24109. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24110. VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo(VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ = {}, uint32_t descriptorSetCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {}) VULKAN_HPP_NOEXCEPT
  24111. : descriptorPool( descriptorPool_ ), descriptorSetCount( descriptorSetCount_ ), pSetLayouts( pSetLayouts_ )
  24112. {}
  24113. VULKAN_HPP_CONSTEXPR DescriptorSetAllocateInfo( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24114. DescriptorSetAllocateInfo( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  24115. : DescriptorSetAllocateInfo( *reinterpret_cast<DescriptorSetAllocateInfo const *>( &rhs ) )
  24116. {}
  24117. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24118. DescriptorSetAllocateInfo( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ )
  24119. : descriptorPool( descriptorPool_ ), descriptorSetCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() )
  24120. {}
  24121. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24122. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24123. VULKAN_HPP_CONSTEXPR_14 DescriptorSetAllocateInfo & operator=( DescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24124. DescriptorSetAllocateInfo & operator=( VkDescriptorSetAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  24125. {
  24126. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo const *>( &rhs );
  24127. return *this;
  24128. }
  24129. DescriptorSetAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  24130. {
  24131. pNext = pNext_;
  24132. return *this;
  24133. }
  24134. DescriptorSetAllocateInfo & setDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool_ ) VULKAN_HPP_NOEXCEPT
  24135. {
  24136. descriptorPool = descriptorPool_;
  24137. return *this;
  24138. }
  24139. DescriptorSetAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
  24140. {
  24141. descriptorSetCount = descriptorSetCount_;
  24142. return *this;
  24143. }
  24144. DescriptorSetAllocateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
  24145. {
  24146. pSetLayouts = pSetLayouts_;
  24147. return *this;
  24148. }
  24149. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24150. DescriptorSetAllocateInfo & setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
  24151. {
  24152. descriptorSetCount = static_cast<uint32_t>( setLayouts_.size() );
  24153. pSetLayouts = setLayouts_.data();
  24154. return *this;
  24155. }
  24156. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24157. operator VkDescriptorSetAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
  24158. {
  24159. return *reinterpret_cast<const VkDescriptorSetAllocateInfo*>( this );
  24160. }
  24161. operator VkDescriptorSetAllocateInfo &() VULKAN_HPP_NOEXCEPT
  24162. {
  24163. return *reinterpret_cast<VkDescriptorSetAllocateInfo*>( this );
  24164. }
  24165. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  24166. auto operator<=>( DescriptorSetAllocateInfo const& ) const = default;
  24167. #else
  24168. bool operator==( DescriptorSetAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  24169. {
  24170. return ( sType == rhs.sType )
  24171. && ( pNext == rhs.pNext )
  24172. && ( descriptorPool == rhs.descriptorPool )
  24173. && ( descriptorSetCount == rhs.descriptorSetCount )
  24174. && ( pSetLayouts == rhs.pSetLayouts );
  24175. }
  24176. bool operator!=( DescriptorSetAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  24177. {
  24178. return !operator==( rhs );
  24179. }
  24180. #endif
  24181. public:
  24182. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetAllocateInfo;
  24183. const void* pNext = {};
  24184. VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool = {};
  24185. uint32_t descriptorSetCount = {};
  24186. const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {};
  24187. };
  24188. static_assert( sizeof( DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ), "struct and wrapper have different size!" );
  24189. static_assert( std::is_standard_layout<DescriptorSetAllocateInfo>::value, "struct wrapper is not a standard layout!" );
  24190. template <>
  24191. struct CppType<StructureType, StructureType::eDescriptorSetAllocateInfo>
  24192. {
  24193. using Type = DescriptorSetAllocateInfo;
  24194. };
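// Illustrative sketch: allocating one descriptor set per layout from a previously created pool.
// The ArrayProxy constructor fills descriptorSetCount and pSetLayouts in one step.  `descriptorPool`
// and `layout` are application-created handles (see the pool sketch above).
//
//   std::vector<vk::DescriptorSetLayout> setLayouts{ layout };
//   vk::DescriptorSetAllocateInfo allocateInfo( descriptorPool, setLayouts );
//   std::vector<vk::DescriptorSet> descriptorSets = device.allocateDescriptorSets( allocateInfo );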
  24195. struct DescriptorSetLayoutBinding
  24196. {
  24197. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24198. VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding(uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ = {}) VULKAN_HPP_NOEXCEPT
  24199. : binding( binding_ ), descriptorType( descriptorType_ ), descriptorCount( descriptorCount_ ), stageFlags( stageFlags_ ), pImmutableSamplers( pImmutableSamplers_ )
  24200. {}
  24201. VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBinding( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24202. DescriptorSetLayoutBinding( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
  24203. : DescriptorSetLayoutBinding( *reinterpret_cast<DescriptorSetLayoutBinding const *>( &rhs ) )
  24204. {}
  24205. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24206. DescriptorSetLayoutBinding( uint32_t binding_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ )
  24207. : binding( binding_ ), descriptorType( descriptorType_ ), descriptorCount( static_cast<uint32_t>( immutableSamplers_.size() ) ), stageFlags( stageFlags_ ), pImmutableSamplers( immutableSamplers_.data() )
  24208. {}
  24209. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24210. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24211. VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBinding & operator=( DescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24212. DescriptorSetLayoutBinding & operator=( VkDescriptorSetLayoutBinding const & rhs ) VULKAN_HPP_NOEXCEPT
  24213. {
  24214. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding const *>( &rhs );
  24215. return *this;
  24216. }
  24217. DescriptorSetLayoutBinding & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
  24218. {
  24219. binding = binding_;
  24220. return *this;
  24221. }
  24222. DescriptorSetLayoutBinding & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
  24223. {
  24224. descriptorType = descriptorType_;
  24225. return *this;
  24226. }
  24227. DescriptorSetLayoutBinding & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
  24228. {
  24229. descriptorCount = descriptorCount_;
  24230. return *this;
  24231. }
  24232. DescriptorSetLayoutBinding & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
  24233. {
  24234. stageFlags = stageFlags_;
  24235. return *this;
  24236. }
  24237. DescriptorSetLayoutBinding & setPImmutableSamplers( const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers_ ) VULKAN_HPP_NOEXCEPT
  24238. {
  24239. pImmutableSamplers = pImmutableSamplers_;
  24240. return *this;
  24241. }
  24242. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24243. DescriptorSetLayoutBinding & setImmutableSamplers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Sampler> const & immutableSamplers_ ) VULKAN_HPP_NOEXCEPT
  24244. {
  24245. descriptorCount = static_cast<uint32_t>( immutableSamplers_.size() );
  24246. pImmutableSamplers = immutableSamplers_.data();
  24247. return *this;
  24248. }
  24249. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24250. operator VkDescriptorSetLayoutBinding const&() const VULKAN_HPP_NOEXCEPT
  24251. {
  24252. return *reinterpret_cast<const VkDescriptorSetLayoutBinding*>( this );
  24253. }
  24254. operator VkDescriptorSetLayoutBinding &() VULKAN_HPP_NOEXCEPT
  24255. {
  24256. return *reinterpret_cast<VkDescriptorSetLayoutBinding*>( this );
  24257. }
  24258. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  24259. auto operator<=>( DescriptorSetLayoutBinding const& ) const = default;
  24260. #else
  24261. bool operator==( DescriptorSetLayoutBinding const& rhs ) const VULKAN_HPP_NOEXCEPT
  24262. {
  24263. return ( binding == rhs.binding )
  24264. && ( descriptorType == rhs.descriptorType )
  24265. && ( descriptorCount == rhs.descriptorCount )
  24266. && ( stageFlags == rhs.stageFlags )
  24267. && ( pImmutableSamplers == rhs.pImmutableSamplers );
  24268. }
  24269. bool operator!=( DescriptorSetLayoutBinding const& rhs ) const VULKAN_HPP_NOEXCEPT
  24270. {
  24271. return !operator==( rhs );
  24272. }
  24273. #endif
  24274. public:
  24275. uint32_t binding = {};
  24276. VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
  24277. uint32_t descriptorCount = {};
  24278. VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
  24279. const VULKAN_HPP_NAMESPACE::Sampler* pImmutableSamplers = {};
  24280. };
  24281. static_assert( sizeof( DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ), "struct and wrapper have different size!" );
  24282. static_assert( std::is_standard_layout<DescriptorSetLayoutBinding>::value, "struct wrapper is not a standard layout!" );
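// Illustrative sketch: binding 0 of a set exposes a single uniform buffer to the vertex stage;
// pImmutableSamplers stays null because no samplers are involved.
//
//   vk::DescriptorSetLayoutBinding uboBinding( /*binding*/ 0,
//                                              vk::DescriptorType::eUniformBuffer,
//                                              /*descriptorCount*/ 1,
//                                              vk::ShaderStageFlagBits::eVertex );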
  24283. struct DescriptorSetLayoutBindingFlagsCreateInfo
  24284. {
  24285. static const bool allowDuplicate = false;
  24286. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
  24287. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24288. VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo(uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags_ = {}) VULKAN_HPP_NOEXCEPT
  24289. : bindingCount( bindingCount_ ), pBindingFlags( pBindingFlags_ )
  24290. {}
  24291. VULKAN_HPP_CONSTEXPR DescriptorSetLayoutBindingFlagsCreateInfo( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24292. DescriptorSetLayoutBindingFlagsCreateInfo( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  24293. : DescriptorSetLayoutBindingFlagsCreateInfo( *reinterpret_cast<DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs ) )
  24294. {}
  24295. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24296. DescriptorSetLayoutBindingFlagsCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_ )
  24297. : bindingCount( static_cast<uint32_t>( bindingFlags_.size() ) ), pBindingFlags( bindingFlags_.data() )
  24298. {}
  24299. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24300. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24301. VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutBindingFlagsCreateInfo & operator=( DescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24302. DescriptorSetLayoutBindingFlagsCreateInfo & operator=( VkDescriptorSetLayoutBindingFlagsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  24303. {
  24304. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo const *>( &rhs );
  24305. return *this;
  24306. }
  24307. DescriptorSetLayoutBindingFlagsCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  24308. {
  24309. pNext = pNext_;
  24310. return *this;
  24311. }
  24312. DescriptorSetLayoutBindingFlagsCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
  24313. {
  24314. bindingCount = bindingCount_;
  24315. return *this;
  24316. }
  24317. DescriptorSetLayoutBindingFlagsCreateInfo & setPBindingFlags( const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags_ ) VULKAN_HPP_NOEXCEPT
  24318. {
  24319. pBindingFlags = pBindingFlags_;
  24320. return *this;
  24321. }
  24322. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24323. DescriptorSetLayoutBindingFlagsCreateInfo & setBindingFlags( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags> const & bindingFlags_ ) VULKAN_HPP_NOEXCEPT
  24324. {
  24325. bindingCount = static_cast<uint32_t>( bindingFlags_.size() );
  24326. pBindingFlags = bindingFlags_.data();
  24327. return *this;
  24328. }
  24329. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24330. operator VkDescriptorSetLayoutBindingFlagsCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  24331. {
  24332. return *reinterpret_cast<const VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
  24333. }
  24334. operator VkDescriptorSetLayoutBindingFlagsCreateInfo &() VULKAN_HPP_NOEXCEPT
  24335. {
  24336. return *reinterpret_cast<VkDescriptorSetLayoutBindingFlagsCreateInfo*>( this );
  24337. }
  24338. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  24339. auto operator<=>( DescriptorSetLayoutBindingFlagsCreateInfo const& ) const = default;
  24340. #else
  24341. bool operator==( DescriptorSetLayoutBindingFlagsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  24342. {
  24343. return ( sType == rhs.sType )
  24344. && ( pNext == rhs.pNext )
  24345. && ( bindingCount == rhs.bindingCount )
  24346. && ( pBindingFlags == rhs.pBindingFlags );
  24347. }
  24348. bool operator!=( DescriptorSetLayoutBindingFlagsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  24349. {
  24350. return !operator==( rhs );
  24351. }
  24352. #endif
  24353. public:
  24354. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo;
  24355. const void* pNext = {};
  24356. uint32_t bindingCount = {};
  24357. const VULKAN_HPP_NAMESPACE::DescriptorBindingFlags* pBindingFlags = {};
  24358. };
  24359. static_assert( sizeof( DescriptorSetLayoutBindingFlagsCreateInfo ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ), "struct and wrapper have different size!" );
  24360. static_assert( std::is_standard_layout<DescriptorSetLayoutBindingFlagsCreateInfo>::value, "struct wrapper is not a standard layout!" );
  24361. template <>
  24362. struct CppType<StructureType, StructureType::eDescriptorSetLayoutBindingFlagsCreateInfo>
  24363. {
  24364. using Type = DescriptorSetLayoutBindingFlagsCreateInfo;
  24365. };
  24366. using DescriptorSetLayoutBindingFlagsCreateInfoEXT = DescriptorSetLayoutBindingFlagsCreateInfo;
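// Illustrative sketch: optional per-binding flags for a set layout, one entry per binding in the
// same order as the bindings array.  Marking a binding partially bound assumes the corresponding
// descriptor-indexing feature is enabled on the device.
//
//   std::vector<vk::DescriptorBindingFlags> bindingFlags{ vk::DescriptorBindingFlagBits::ePartiallyBound };
//   vk::DescriptorSetLayoutBindingFlagsCreateInfo bindingFlagsInfo( bindingFlags );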
  24367. struct DescriptorSetLayoutCreateInfo
  24368. {
  24369. static const bool allowDuplicate = false;
  24370. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutCreateInfo;
  24371. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24372. VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ = {}, uint32_t bindingCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ = {}) VULKAN_HPP_NOEXCEPT
  24373. : flags( flags_ ), bindingCount( bindingCount_ ), pBindings( pBindings_ )
  24374. {}
  24375. VULKAN_HPP_CONSTEXPR DescriptorSetLayoutCreateInfo( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24376. DescriptorSetLayoutCreateInfo( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  24377. : DescriptorSetLayoutCreateInfo( *reinterpret_cast<DescriptorSetLayoutCreateInfo const *>( &rhs ) )
  24378. {}
  24379. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24380. DescriptorSetLayoutCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_ )
  24381. : flags( flags_ ), bindingCount( static_cast<uint32_t>( bindings_.size() ) ), pBindings( bindings_.data() )
  24382. {}
  24383. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24384. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24385. VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutCreateInfo & operator=( DescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24386. DescriptorSetLayoutCreateInfo & operator=( VkDescriptorSetLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  24387. {
  24388. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const *>( &rhs );
  24389. return *this;
  24390. }
  24391. DescriptorSetLayoutCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  24392. {
  24393. pNext = pNext_;
  24394. return *this;
  24395. }
  24396. DescriptorSetLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  24397. {
  24398. flags = flags_;
  24399. return *this;
  24400. }
  24401. DescriptorSetLayoutCreateInfo & setBindingCount( uint32_t bindingCount_ ) VULKAN_HPP_NOEXCEPT
  24402. {
  24403. bindingCount = bindingCount_;
  24404. return *this;
  24405. }
  24406. DescriptorSetLayoutCreateInfo & setPBindings( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings_ ) VULKAN_HPP_NOEXCEPT
  24407. {
  24408. pBindings = pBindings_;
  24409. return *this;
  24410. }
  24411. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24412. DescriptorSetLayoutCreateInfo & setBindings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding> const & bindings_ ) VULKAN_HPP_NOEXCEPT
  24413. {
  24414. bindingCount = static_cast<uint32_t>( bindings_.size() );
  24415. pBindings = bindings_.data();
  24416. return *this;
  24417. }
  24418. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24419. operator VkDescriptorSetLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  24420. {
  24421. return *reinterpret_cast<const VkDescriptorSetLayoutCreateInfo*>( this );
  24422. }
  24423. operator VkDescriptorSetLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
  24424. {
  24425. return *reinterpret_cast<VkDescriptorSetLayoutCreateInfo*>( this );
  24426. }
  24427. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  24428. auto operator<=>( DescriptorSetLayoutCreateInfo const& ) const = default;
  24429. #else
  24430. bool operator==( DescriptorSetLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  24431. {
  24432. return ( sType == rhs.sType )
  24433. && ( pNext == rhs.pNext )
  24434. && ( flags == rhs.flags )
  24435. && ( bindingCount == rhs.bindingCount )
  24436. && ( pBindings == rhs.pBindings );
  24437. }
  24438. bool operator!=( DescriptorSetLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  24439. {
  24440. return !operator==( rhs );
  24441. }
  24442. #endif
  24443. public:
  24444. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutCreateInfo;
  24445. const void* pNext = {};
  24446. VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags flags = {};
  24447. uint32_t bindingCount = {};
  24448. const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding* pBindings = {};
  24449. };
  24450. static_assert( sizeof( DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ), "struct and wrapper have different size!" );
  24451. static_assert( std::is_standard_layout<DescriptorSetLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
  24452. template <>
  24453. struct CppType<StructureType, StructureType::eDescriptorSetLayoutCreateInfo>
  24454. {
  24455. using Type = DescriptorSetLayoutCreateInfo;
  24456. };
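// Illustrative usage sketch (not part of the generated header): building a
// descriptor set layout through the enhanced-mode constructor above, assuming
// the default configuration (vk namespace, enhanced mode, exceptions enabled)
// and a valid vk::Device named `device`; binding numbers, counts and stage
// flags are arbitrary example values.
//
//   std::array<vk::DescriptorSetLayoutBinding, 2> bindings = {
//     vk::DescriptorSetLayoutBinding( 0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex ),
//     vk::DescriptorSetLayoutBinding( 1, vk::DescriptorType::eCombinedImageSampler, 1, vk::ShaderStageFlagBits::eFragment )
//   };
//   vk::DescriptorSetLayoutCreateInfo layoutInfo( {}, bindings );   // bindingCount / pBindings filled from the proxy
//   vk::DescriptorSetLayout layout = device.createDescriptorSetLayout( layoutInfo );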
  24457. struct DescriptorSetLayoutSupport
  24458. {
  24459. static const bool allowDuplicate = false;
  24460. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetLayoutSupport;
  24461. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24462. VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport(VULKAN_HPP_NAMESPACE::Bool32 supported_ = {}) VULKAN_HPP_NOEXCEPT
  24463. : supported( supported_ )
  24464. {}
  24465. VULKAN_HPP_CONSTEXPR DescriptorSetLayoutSupport( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24466. DescriptorSetLayoutSupport( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
  24467. : DescriptorSetLayoutSupport( *reinterpret_cast<DescriptorSetLayoutSupport const *>( &rhs ) )
  24468. {}
  24469. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24470. VULKAN_HPP_CONSTEXPR_14 DescriptorSetLayoutSupport & operator=( DescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24471. DescriptorSetLayoutSupport & operator=( VkDescriptorSetLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
  24472. {
  24473. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport const *>( &rhs );
  24474. return *this;
  24475. }
  24476. operator VkDescriptorSetLayoutSupport const&() const VULKAN_HPP_NOEXCEPT
  24477. {
  24478. return *reinterpret_cast<const VkDescriptorSetLayoutSupport*>( this );
  24479. }
  24480. operator VkDescriptorSetLayoutSupport &() VULKAN_HPP_NOEXCEPT
  24481. {
  24482. return *reinterpret_cast<VkDescriptorSetLayoutSupport*>( this );
  24483. }
  24484. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  24485. auto operator<=>( DescriptorSetLayoutSupport const& ) const = default;
  24486. #else
  24487. bool operator==( DescriptorSetLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
  24488. {
  24489. return ( sType == rhs.sType )
  24490. && ( pNext == rhs.pNext )
  24491. && ( supported == rhs.supported );
  24492. }
  24493. bool operator!=( DescriptorSetLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
  24494. {
  24495. return !operator==( rhs );
  24496. }
  24497. #endif
  24498. public:
  24499. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetLayoutSupport;
  24500. void* pNext = {};
  24501. VULKAN_HPP_NAMESPACE::Bool32 supported = {};
  24502. };
  24503. static_assert( sizeof( DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ), "struct and wrapper have different size!" );
  24504. static_assert( std::is_standard_layout<DescriptorSetLayoutSupport>::value, "struct wrapper is not a standard layout!" );
  24505. template <>
  24506. struct CppType<StructureType, StructureType::eDescriptorSetLayoutSupport>
  24507. {
  24508. using Type = DescriptorSetLayoutSupport;
  24509. };
  24510. using DescriptorSetLayoutSupportKHR = DescriptorSetLayoutSupport;
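// Illustrative usage sketch (not part of the generated header): querying
// whether a layout described by a DescriptorSetLayoutCreateInfo could be
// created, without actually creating it. Assumes a valid vk::Device named
// `device` and a filled-in `layoutInfo` as in the sketch above.
//
//   vk::DescriptorSetLayoutSupport support = device.getDescriptorSetLayoutSupport( layoutInfo );
//   if ( !support.supported )
//   {
//     // fall back to a smaller or simpler layout
//   }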
  24511. struct DescriptorSetVariableDescriptorCountAllocateInfo
  24512. {
  24513. static const bool allowDuplicate = false;
  24514. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
  24515. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24516. VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo(uint32_t descriptorSetCount_ = {}, const uint32_t* pDescriptorCounts_ = {}) VULKAN_HPP_NOEXCEPT
  24517. : descriptorSetCount( descriptorSetCount_ ), pDescriptorCounts( pDescriptorCounts_ )
  24518. {}
  24519. VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountAllocateInfo( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24520. DescriptorSetVariableDescriptorCountAllocateInfo( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  24521. : DescriptorSetVariableDescriptorCountAllocateInfo( *reinterpret_cast<DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs ) )
  24522. {}
  24523. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24524. DescriptorSetVariableDescriptorCountAllocateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ )
  24525. : descriptorSetCount( static_cast<uint32_t>( descriptorCounts_.size() ) ), pDescriptorCounts( descriptorCounts_.data() )
  24526. {}
  24527. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24528. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24529. VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountAllocateInfo & operator=( DescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24530. DescriptorSetVariableDescriptorCountAllocateInfo & operator=( VkDescriptorSetVariableDescriptorCountAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  24531. {
  24532. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo const *>( &rhs );
  24533. return *this;
  24534. }
  24535. DescriptorSetVariableDescriptorCountAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  24536. {
  24537. pNext = pNext_;
  24538. return *this;
  24539. }
  24540. DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorSetCount( uint32_t descriptorSetCount_ ) VULKAN_HPP_NOEXCEPT
  24541. {
  24542. descriptorSetCount = descriptorSetCount_;
  24543. return *this;
  24544. }
  24545. DescriptorSetVariableDescriptorCountAllocateInfo & setPDescriptorCounts( const uint32_t* pDescriptorCounts_ ) VULKAN_HPP_NOEXCEPT
  24546. {
  24547. pDescriptorCounts = pDescriptorCounts_;
  24548. return *this;
  24549. }
  24550. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24551. DescriptorSetVariableDescriptorCountAllocateInfo & setDescriptorCounts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & descriptorCounts_ ) VULKAN_HPP_NOEXCEPT
  24552. {
  24553. descriptorSetCount = static_cast<uint32_t>( descriptorCounts_.size() );
  24554. pDescriptorCounts = descriptorCounts_.data();
  24555. return *this;
  24556. }
  24557. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24558. operator VkDescriptorSetVariableDescriptorCountAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
  24559. {
  24560. return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
  24561. }
  24562. operator VkDescriptorSetVariableDescriptorCountAllocateInfo &() VULKAN_HPP_NOEXCEPT
  24563. {
  24564. return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountAllocateInfo*>( this );
  24565. }
  24566. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  24567. auto operator<=>( DescriptorSetVariableDescriptorCountAllocateInfo const& ) const = default;
  24568. #else
  24569. bool operator==( DescriptorSetVariableDescriptorCountAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  24570. {
  24571. return ( sType == rhs.sType )
  24572. && ( pNext == rhs.pNext )
  24573. && ( descriptorSetCount == rhs.descriptorSetCount )
  24574. && ( pDescriptorCounts == rhs.pDescriptorCounts );
  24575. }
  24576. bool operator!=( DescriptorSetVariableDescriptorCountAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  24577. {
  24578. return !operator==( rhs );
  24579. }
  24580. #endif
  24581. public:
  24582. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo;
  24583. const void* pNext = {};
  24584. uint32_t descriptorSetCount = {};
  24585. const uint32_t* pDescriptorCounts = {};
  24586. };
  24587. static_assert( sizeof( DescriptorSetVariableDescriptorCountAllocateInfo ) == sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ), "struct and wrapper have different size!" );
  24588. static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountAllocateInfo>::value, "struct wrapper is not a standard layout!" );
  24589. template <>
  24590. struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountAllocateInfo>
  24591. {
  24592. using Type = DescriptorSetVariableDescriptorCountAllocateInfo;
  24593. };
  24594. using DescriptorSetVariableDescriptorCountAllocateInfoEXT = DescriptorSetVariableDescriptorCountAllocateInfo;
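// Illustrative usage sketch (not part of the generated header): when the last
// binding of a layout uses eVariableDescriptorCount, the actual count for each
// allocated set is supplied by chaining this struct into the pNext of a
// DescriptorSetAllocateInfo. Assumes `device`, `pool` and `layout` are valid
// handles; the count 128 is an arbitrary example value.
//
//   uint32_t variableCounts[] = { 128 };
//   vk::DescriptorSetVariableDescriptorCountAllocateInfo countInfo( variableCounts );
//   vk::DescriptorSetAllocateInfo allocInfo( pool, layout );
//   allocInfo.setPNext( &countInfo );
//   std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );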
  24595. struct DescriptorSetVariableDescriptorCountLayoutSupport
  24596. {
  24597. static const bool allowDuplicate = false;
  24598. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
  24599. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24600. VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport(uint32_t maxVariableDescriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
  24601. : maxVariableDescriptorCount( maxVariableDescriptorCount_ )
  24602. {}
  24603. VULKAN_HPP_CONSTEXPR DescriptorSetVariableDescriptorCountLayoutSupport( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24604. DescriptorSetVariableDescriptorCountLayoutSupport( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
  24605. : DescriptorSetVariableDescriptorCountLayoutSupport( *reinterpret_cast<DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs ) )
  24606. {}
  24607. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24608. VULKAN_HPP_CONSTEXPR_14 DescriptorSetVariableDescriptorCountLayoutSupport & operator=( DescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24609. DescriptorSetVariableDescriptorCountLayoutSupport & operator=( VkDescriptorSetVariableDescriptorCountLayoutSupport const & rhs ) VULKAN_HPP_NOEXCEPT
  24610. {
  24611. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport const *>( &rhs );
  24612. return *this;
  24613. }
  24614. operator VkDescriptorSetVariableDescriptorCountLayoutSupport const&() const VULKAN_HPP_NOEXCEPT
  24615. {
  24616. return *reinterpret_cast<const VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
  24617. }
  24618. operator VkDescriptorSetVariableDescriptorCountLayoutSupport &() VULKAN_HPP_NOEXCEPT
  24619. {
  24620. return *reinterpret_cast<VkDescriptorSetVariableDescriptorCountLayoutSupport*>( this );
  24621. }
  24622. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  24623. auto operator<=>( DescriptorSetVariableDescriptorCountLayoutSupport const& ) const = default;
  24624. #else
  24625. bool operator==( DescriptorSetVariableDescriptorCountLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
  24626. {
  24627. return ( sType == rhs.sType )
  24628. && ( pNext == rhs.pNext )
  24629. && ( maxVariableDescriptorCount == rhs.maxVariableDescriptorCount );
  24630. }
  24631. bool operator!=( DescriptorSetVariableDescriptorCountLayoutSupport const& rhs ) const VULKAN_HPP_NOEXCEPT
  24632. {
  24633. return !operator==( rhs );
  24634. }
  24635. #endif
  24636. public:
  24637. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport;
  24638. void* pNext = {};
  24639. uint32_t maxVariableDescriptorCount = {};
  24640. };
  24641. static_assert( sizeof( DescriptorSetVariableDescriptorCountLayoutSupport ) == sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ), "struct and wrapper have different size!" );
  24642. static_assert( std::is_standard_layout<DescriptorSetVariableDescriptorCountLayoutSupport>::value, "struct wrapper is not a standard layout!" );
  24643. template <>
  24644. struct CppType<StructureType, StructureType::eDescriptorSetVariableDescriptorCountLayoutSupport>
  24645. {
  24646. using Type = DescriptorSetVariableDescriptorCountLayoutSupport;
  24647. };
  24648. using DescriptorSetVariableDescriptorCountLayoutSupportEXT = DescriptorSetVariableDescriptorCountLayoutSupport;
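// Illustrative usage sketch (not part of the generated header): reading the
// maximum variable descriptor count back through a structure chain, assuming
// the structure-chain overload of getDescriptorSetLayoutSupport is available
// in this configuration; `device` and `layoutInfo` are as in the sketches
// above.
//
//   auto chain = device.getDescriptorSetLayoutSupport<vk::DescriptorSetLayoutSupport,
//                                                     vk::DescriptorSetVariableDescriptorCountLayoutSupport>( layoutInfo );
//   uint32_t maxCount = chain.get<vk::DescriptorSetVariableDescriptorCountLayoutSupport>().maxVariableDescriptorCount;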
  24649. struct DescriptorUpdateTemplateEntry
  24650. {
  24651. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24652. VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry(uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, size_t offset_ = {}, size_t stride_ = {}) VULKAN_HPP_NOEXCEPT
  24653. : dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ ), descriptorType( descriptorType_ ), offset( offset_ ), stride( stride_ )
  24654. {}
  24655. VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateEntry( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24656. DescriptorUpdateTemplateEntry( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
  24657. : DescriptorUpdateTemplateEntry( *reinterpret_cast<DescriptorUpdateTemplateEntry const *>( &rhs ) )
  24658. {}
  24659. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24660. VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateEntry & operator=( DescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24661. DescriptorUpdateTemplateEntry & operator=( VkDescriptorUpdateTemplateEntry const & rhs ) VULKAN_HPP_NOEXCEPT
  24662. {
  24663. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry const *>( &rhs );
  24664. return *this;
  24665. }
  24666. DescriptorUpdateTemplateEntry & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
  24667. {
  24668. dstBinding = dstBinding_;
  24669. return *this;
  24670. }
  24671. DescriptorUpdateTemplateEntry & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
  24672. {
  24673. dstArrayElement = dstArrayElement_;
  24674. return *this;
  24675. }
  24676. DescriptorUpdateTemplateEntry & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
  24677. {
  24678. descriptorCount = descriptorCount_;
  24679. return *this;
  24680. }
  24681. DescriptorUpdateTemplateEntry & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
  24682. {
  24683. descriptorType = descriptorType_;
  24684. return *this;
  24685. }
  24686. DescriptorUpdateTemplateEntry & setOffset( size_t offset_ ) VULKAN_HPP_NOEXCEPT
  24687. {
  24688. offset = offset_;
  24689. return *this;
  24690. }
  24691. DescriptorUpdateTemplateEntry & setStride( size_t stride_ ) VULKAN_HPP_NOEXCEPT
  24692. {
  24693. stride = stride_;
  24694. return *this;
  24695. }
  24696. operator VkDescriptorUpdateTemplateEntry const&() const VULKAN_HPP_NOEXCEPT
  24697. {
  24698. return *reinterpret_cast<const VkDescriptorUpdateTemplateEntry*>( this );
  24699. }
  24700. operator VkDescriptorUpdateTemplateEntry &() VULKAN_HPP_NOEXCEPT
  24701. {
  24702. return *reinterpret_cast<VkDescriptorUpdateTemplateEntry*>( this );
  24703. }
  24704. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  24705. auto operator<=>( DescriptorUpdateTemplateEntry const& ) const = default;
  24706. #else
  24707. bool operator==( DescriptorUpdateTemplateEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
  24708. {
  24709. return ( dstBinding == rhs.dstBinding )
  24710. && ( dstArrayElement == rhs.dstArrayElement )
  24711. && ( descriptorCount == rhs.descriptorCount )
  24712. && ( descriptorType == rhs.descriptorType )
  24713. && ( offset == rhs.offset )
  24714. && ( stride == rhs.stride );
  24715. }
  24716. bool operator!=( DescriptorUpdateTemplateEntry const& rhs ) const VULKAN_HPP_NOEXCEPT
  24717. {
  24718. return !operator==( rhs );
  24719. }
  24720. #endif
  24721. public:
  24722. uint32_t dstBinding = {};
  24723. uint32_t dstArrayElement = {};
  24724. uint32_t descriptorCount = {};
  24725. VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
  24726. size_t offset = {};
  24727. size_t stride = {};
  24728. };
  24729. static_assert( sizeof( DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ), "struct and wrapper have different size!" );
  24730. static_assert( std::is_standard_layout<DescriptorUpdateTemplateEntry>::value, "struct wrapper is not a standard layout!" );
  24731. using DescriptorUpdateTemplateEntryKHR = DescriptorUpdateTemplateEntry;
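// Illustrative usage sketch (not part of the generated header): a template
// entry maps one descriptor binding to an offset and stride inside some
// host-side blob. `HostData` below is a hypothetical application struct used
// only for this example.
//
//   struct HostData { vk::DescriptorBufferInfo buffer; };
//   HostData hostData = { vk::DescriptorBufferInfo( uniformBuffer, 0, VK_WHOLE_SIZE ) };
//   vk::DescriptorUpdateTemplateEntry entry(
//     /*dstBinding*/ 0, /*dstArrayElement*/ 0, /*descriptorCount*/ 1,
//     vk::DescriptorType::eUniformBuffer,
//     offsetof( HostData, buffer ), sizeof( vk::DescriptorBufferInfo ) );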
  24732. struct DescriptorUpdateTemplateCreateInfo
  24733. {
  24734. static const bool allowDuplicate = false;
  24735. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDescriptorUpdateTemplateCreateInfo;
  24736. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24737. VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ = {}, uint32_t descriptorUpdateEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ = {}, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {}) VULKAN_HPP_NOEXCEPT
  24738. : flags( flags_ ), descriptorUpdateEntryCount( descriptorUpdateEntryCount_ ), pDescriptorUpdateEntries( pDescriptorUpdateEntries_ ), templateType( templateType_ ), descriptorSetLayout( descriptorSetLayout_ ), pipelineBindPoint( pipelineBindPoint_ ), pipelineLayout( pipelineLayout_ ), set( set_ )
  24739. {}
  24740. VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplateCreateInfo( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24741. DescriptorUpdateTemplateCreateInfo( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  24742. : DescriptorUpdateTemplateCreateInfo( *reinterpret_cast<DescriptorUpdateTemplateCreateInfo const *>( &rhs ) )
  24743. {}
  24744. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24745. DescriptorUpdateTemplateCreateInfo( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet, VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ = {}, uint32_t set_ = {} )
  24746. : flags( flags_ ), descriptorUpdateEntryCount( static_cast<uint32_t>( descriptorUpdateEntries_.size() ) ), pDescriptorUpdateEntries( descriptorUpdateEntries_.data() ), templateType( templateType_ ), descriptorSetLayout( descriptorSetLayout_ ), pipelineBindPoint( pipelineBindPoint_ ), pipelineLayout( pipelineLayout_ ), set( set_ )
  24747. {}
  24748. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24749. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24750. VULKAN_HPP_CONSTEXPR_14 DescriptorUpdateTemplateCreateInfo & operator=( DescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24751. DescriptorUpdateTemplateCreateInfo & operator=( VkDescriptorUpdateTemplateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  24752. {
  24753. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo const *>( &rhs );
  24754. return *this;
  24755. }
  24756. DescriptorUpdateTemplateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  24757. {
  24758. pNext = pNext_;
  24759. return *this;
  24760. }
  24761. DescriptorUpdateTemplateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  24762. {
  24763. flags = flags_;
  24764. return *this;
  24765. }
  24766. DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntryCount( uint32_t descriptorUpdateEntryCount_ ) VULKAN_HPP_NOEXCEPT
  24767. {
  24768. descriptorUpdateEntryCount = descriptorUpdateEntryCount_;
  24769. return *this;
  24770. }
  24771. DescriptorUpdateTemplateCreateInfo & setPDescriptorUpdateEntries( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
  24772. {
  24773. pDescriptorUpdateEntries = pDescriptorUpdateEntries_;
  24774. return *this;
  24775. }
  24776. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24777. DescriptorUpdateTemplateCreateInfo & setDescriptorUpdateEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry> const & descriptorUpdateEntries_ ) VULKAN_HPP_NOEXCEPT
  24778. {
  24779. descriptorUpdateEntryCount = static_cast<uint32_t>( descriptorUpdateEntries_.size() );
  24780. pDescriptorUpdateEntries = descriptorUpdateEntries_.data();
  24781. return *this;
  24782. }
  24783. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24784. DescriptorUpdateTemplateCreateInfo & setTemplateType( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType_ ) VULKAN_HPP_NOEXCEPT
  24785. {
  24786. templateType = templateType_;
  24787. return *this;
  24788. }
  24789. DescriptorUpdateTemplateCreateInfo & setDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout_ ) VULKAN_HPP_NOEXCEPT
  24790. {
  24791. descriptorSetLayout = descriptorSetLayout_;
  24792. return *this;
  24793. }
  24794. DescriptorUpdateTemplateCreateInfo & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
  24795. {
  24796. pipelineBindPoint = pipelineBindPoint_;
  24797. return *this;
  24798. }
  24799. DescriptorUpdateTemplateCreateInfo & setPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout_ ) VULKAN_HPP_NOEXCEPT
  24800. {
  24801. pipelineLayout = pipelineLayout_;
  24802. return *this;
  24803. }
  24804. DescriptorUpdateTemplateCreateInfo & setSet( uint32_t set_ ) VULKAN_HPP_NOEXCEPT
  24805. {
  24806. set = set_;
  24807. return *this;
  24808. }
  24809. operator VkDescriptorUpdateTemplateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  24810. {
  24811. return *reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo*>( this );
  24812. }
  24813. operator VkDescriptorUpdateTemplateCreateInfo &() VULKAN_HPP_NOEXCEPT
  24814. {
  24815. return *reinterpret_cast<VkDescriptorUpdateTemplateCreateInfo*>( this );
  24816. }
  24817. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  24818. auto operator<=>( DescriptorUpdateTemplateCreateInfo const& ) const = default;
  24819. #else
  24820. bool operator==( DescriptorUpdateTemplateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  24821. {
  24822. return ( sType == rhs.sType )
  24823. && ( pNext == rhs.pNext )
  24824. && ( flags == rhs.flags )
  24825. && ( descriptorUpdateEntryCount == rhs.descriptorUpdateEntryCount )
  24826. && ( pDescriptorUpdateEntries == rhs.pDescriptorUpdateEntries )
  24827. && ( templateType == rhs.templateType )
  24828. && ( descriptorSetLayout == rhs.descriptorSetLayout )
  24829. && ( pipelineBindPoint == rhs.pipelineBindPoint )
  24830. && ( pipelineLayout == rhs.pipelineLayout )
  24831. && ( set == rhs.set );
  24832. }
  24833. bool operator!=( DescriptorUpdateTemplateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  24834. {
  24835. return !operator==( rhs );
  24836. }
  24837. #endif
  24838. public:
  24839. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDescriptorUpdateTemplateCreateInfo;
  24840. const void* pNext = {};
  24841. VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags flags = {};
  24842. uint32_t descriptorUpdateEntryCount = {};
  24843. const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry* pDescriptorUpdateEntries = {};
  24844. VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType templateType = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType::eDescriptorSet;
  24845. VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout = {};
  24846. VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
  24847. VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout = {};
  24848. uint32_t set = {};
  24849. };
  24850. static_assert( sizeof( DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ), "struct and wrapper have different size!" );
  24851. static_assert( std::is_standard_layout<DescriptorUpdateTemplateCreateInfo>::value, "struct wrapper is not a standard layout!" );
  24852. template <>
  24853. struct CppType<StructureType, StructureType::eDescriptorUpdateTemplateCreateInfo>
  24854. {
  24855. using Type = DescriptorUpdateTemplateCreateInfo;
  24856. };
  24857. using DescriptorUpdateTemplateCreateInfoKHR = DescriptorUpdateTemplateCreateInfo;
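// Illustrative usage sketch (not part of the generated header): creating an
// update template for a single entry and then updating a descriptor set from
// host data in one call. Assumes `device`, `layout`, `set`, and `entry` /
// `hostData` from the previous sketch are valid; enhanced mode is assumed for
// the ArrayProxy constructor.
//
//   vk::DescriptorUpdateTemplateCreateInfo templateInfo(
//     {}, entry, vk::DescriptorUpdateTemplateType::eDescriptorSet, layout );
//   vk::DescriptorUpdateTemplate updateTemplate = device.createDescriptorUpdateTemplate( templateInfo );
//   device.updateDescriptorSetWithTemplate( set, updateTemplate, &hostData );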
  24858. struct DeviceQueueCreateInfo
  24859. {
  24860. static const bool allowDuplicate = false;
  24861. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueCreateInfo;
  24862. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24863. VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t queueCount_ = {}, const float* pQueuePriorities_ = {}) VULKAN_HPP_NOEXCEPT
  24864. : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueCount( queueCount_ ), pQueuePriorities( pQueuePriorities_ )
  24865. {}
  24866. VULKAN_HPP_CONSTEXPR DeviceQueueCreateInfo( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24867. DeviceQueueCreateInfo( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  24868. : DeviceQueueCreateInfo( *reinterpret_cast<DeviceQueueCreateInfo const *>( &rhs ) )
  24869. {}
  24870. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24871. DeviceQueueCreateInfo( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_, uint32_t queueFamilyIndex_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ )
  24872. : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueCount( static_cast<uint32_t>( queuePriorities_.size() ) ), pQueuePriorities( queuePriorities_.data() )
  24873. {}
  24874. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24875. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24876. VULKAN_HPP_CONSTEXPR_14 DeviceQueueCreateInfo & operator=( DeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24877. DeviceQueueCreateInfo & operator=( VkDeviceQueueCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  24878. {
  24879. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo const *>( &rhs );
  24880. return *this;
  24881. }
  24882. DeviceQueueCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  24883. {
  24884. pNext = pNext_;
  24885. return *this;
  24886. }
  24887. DeviceQueueCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  24888. {
  24889. flags = flags_;
  24890. return *this;
  24891. }
  24892. DeviceQueueCreateInfo & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
  24893. {
  24894. queueFamilyIndex = queueFamilyIndex_;
  24895. return *this;
  24896. }
  24897. DeviceQueueCreateInfo & setQueueCount( uint32_t queueCount_ ) VULKAN_HPP_NOEXCEPT
  24898. {
  24899. queueCount = queueCount_;
  24900. return *this;
  24901. }
  24902. DeviceQueueCreateInfo & setPQueuePriorities( const float* pQueuePriorities_ ) VULKAN_HPP_NOEXCEPT
  24903. {
  24904. pQueuePriorities = pQueuePriorities_;
  24905. return *this;
  24906. }
  24907. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24908. DeviceQueueCreateInfo & setQueuePriorities( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & queuePriorities_ ) VULKAN_HPP_NOEXCEPT
  24909. {
  24910. queueCount = static_cast<uint32_t>( queuePriorities_.size() );
  24911. pQueuePriorities = queuePriorities_.data();
  24912. return *this;
  24913. }
  24914. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  24915. operator VkDeviceQueueCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  24916. {
  24917. return *reinterpret_cast<const VkDeviceQueueCreateInfo*>( this );
  24918. }
  24919. operator VkDeviceQueueCreateInfo &() VULKAN_HPP_NOEXCEPT
  24920. {
  24921. return *reinterpret_cast<VkDeviceQueueCreateInfo*>( this );
  24922. }
  24923. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  24924. auto operator<=>( DeviceQueueCreateInfo const& ) const = default;
  24925. #else
  24926. bool operator==( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  24927. {
  24928. return ( sType == rhs.sType )
  24929. && ( pNext == rhs.pNext )
  24930. && ( flags == rhs.flags )
  24931. && ( queueFamilyIndex == rhs.queueFamilyIndex )
  24932. && ( queueCount == rhs.queueCount )
  24933. && ( pQueuePriorities == rhs.pQueuePriorities );
  24934. }
  24935. bool operator!=( DeviceQueueCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  24936. {
  24937. return !operator==( rhs );
  24938. }
  24939. #endif
  24940. public:
  24941. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueCreateInfo;
  24942. const void* pNext = {};
  24943. VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
  24944. uint32_t queueFamilyIndex = {};
  24945. uint32_t queueCount = {};
  24946. const float* pQueuePriorities = {};
  24947. };
  24948. static_assert( sizeof( DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ), "struct and wrapper have different size!" );
  24949. static_assert( std::is_standard_layout<DeviceQueueCreateInfo>::value, "struct wrapper is not a standard layout!" );
  24950. template <>
  24951. struct CppType<StructureType, StructureType::eDeviceQueueCreateInfo>
  24952. {
  24953. using Type = DeviceQueueCreateInfo;
  24954. };
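// Illustrative usage sketch (not part of the generated header): requesting one
// queue from a previously selected queue family and feeding it into device
// creation. Assumes a valid vk::PhysicalDevice named `physicalDevice` and a
// chosen `queueFamilyIndex`; the priority value is arbitrary.
//
//   float priorities[] = { 1.0f };
//   vk::DeviceQueueCreateInfo queueInfo( {}, queueFamilyIndex, priorities );   // queueCount taken from the proxy
//   vk::DeviceCreateInfo deviceInfo( {}, queueInfo );
//   vk::Device device = physicalDevice.createDevice( deviceInfo );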
  24955. struct PhysicalDeviceFeatures
  24956. {
  24957. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24958. VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 logicOp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ = {}, VULKAN_HPP_NAMESPACE::Bool32 wideLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 largePoints_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ = {}, VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ = {}, VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ = {}) VULKAN_HPP_NOEXCEPT
  24959. : robustBufferAccess( robustBufferAccess_ ), fullDrawIndexUint32( fullDrawIndexUint32_ ), imageCubeArray( imageCubeArray_ ), independentBlend( independentBlend_ ), geometryShader( geometryShader_ ), tessellationShader( tessellationShader_ ), sampleRateShading( sampleRateShading_ ), dualSrcBlend( dualSrcBlend_ ), logicOp( logicOp_ ), multiDrawIndirect( multiDrawIndirect_ ), drawIndirectFirstInstance( drawIndirectFirstInstance_ ), depthClamp( depthClamp_ ), depthBiasClamp( depthBiasClamp_ ), fillModeNonSolid( fillModeNonSolid_ ), depthBounds( depthBounds_ ), wideLines( wideLines_ ), largePoints( largePoints_ ), alphaToOne( alphaToOne_ ), multiViewport( multiViewport_ ), samplerAnisotropy( samplerAnisotropy_ ), textureCompressionETC2( textureCompressionETC2_ ), textureCompressionASTC_LDR( textureCompressionASTC_LDR_ ), textureCompressionBC( textureCompressionBC_ ), occlusionQueryPrecise( occlusionQueryPrecise_ ), pipelineStatisticsQuery( pipelineStatisticsQuery_ ), vertexPipelineStoresAndAtomics( vertexPipelineStoresAndAtomics_ ), fragmentStoresAndAtomics( fragmentStoresAndAtomics_ ), shaderTessellationAndGeometryPointSize( shaderTessellationAndGeometryPointSize_ ), shaderImageGatherExtended( shaderImageGatherExtended_ ), shaderStorageImageExtendedFormats( shaderStorageImageExtendedFormats_ ), shaderStorageImageMultisample( shaderStorageImageMultisample_ ), shaderStorageImageReadWithoutFormat( shaderStorageImageReadWithoutFormat_ ), shaderStorageImageWriteWithoutFormat( shaderStorageImageWriteWithoutFormat_ ), shaderUniformBufferArrayDynamicIndexing( shaderUniformBufferArrayDynamicIndexing_ ), shaderSampledImageArrayDynamicIndexing( shaderSampledImageArrayDynamicIndexing_ ), shaderStorageBufferArrayDynamicIndexing( shaderStorageBufferArrayDynamicIndexing_ ), shaderStorageImageArrayDynamicIndexing( shaderStorageImageArrayDynamicIndexing_ ), shaderClipDistance( shaderClipDistance_ ), shaderCullDistance( shaderCullDistance_ ), shaderFloat64( shaderFloat64_ ), shaderInt64( shaderInt64_ ), shaderInt16( shaderInt16_ ), shaderResourceResidency( shaderResourceResidency_ ), shaderResourceMinLod( shaderResourceMinLod_ ), sparseBinding( sparseBinding_ ), sparseResidencyBuffer( sparseResidencyBuffer_ ), sparseResidencyImage2D( sparseResidencyImage2D_ ), sparseResidencyImage3D( sparseResidencyImage3D_ ), sparseResidency2Samples( sparseResidency2Samples_ ), sparseResidency4Samples( sparseResidency4Samples_ ), sparseResidency8Samples( sparseResidency8Samples_ ), sparseResidency16Samples( sparseResidency16Samples_ ), sparseResidencyAliased( sparseResidencyAliased_ ), variableMultisampleRate( variableMultisampleRate_ ), inheritedQueries( inheritedQueries_ )
  24960. {}
  24961. VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24962. PhysicalDeviceFeatures( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  24963. : PhysicalDeviceFeatures( *reinterpret_cast<PhysicalDeviceFeatures const *>( &rhs ) )
  24964. {}
  24965. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  24966. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures & operator=( PhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  24967. PhysicalDeviceFeatures & operator=( VkPhysicalDeviceFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  24968. {
  24969. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const *>( &rhs );
  24970. return *this;
  24971. }
  24972. PhysicalDeviceFeatures & setRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
  24973. {
  24974. robustBufferAccess = robustBufferAccess_;
  24975. return *this;
  24976. }
  24977. PhysicalDeviceFeatures & setFullDrawIndexUint32( VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32_ ) VULKAN_HPP_NOEXCEPT
  24978. {
  24979. fullDrawIndexUint32 = fullDrawIndexUint32_;
  24980. return *this;
  24981. }
  24982. PhysicalDeviceFeatures & setImageCubeArray( VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray_ ) VULKAN_HPP_NOEXCEPT
  24983. {
  24984. imageCubeArray = imageCubeArray_;
  24985. return *this;
  24986. }
  24987. PhysicalDeviceFeatures & setIndependentBlend( VULKAN_HPP_NAMESPACE::Bool32 independentBlend_ ) VULKAN_HPP_NOEXCEPT
  24988. {
  24989. independentBlend = independentBlend_;
  24990. return *this;
  24991. }
  24992. PhysicalDeviceFeatures & setGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 geometryShader_ ) VULKAN_HPP_NOEXCEPT
  24993. {
  24994. geometryShader = geometryShader_;
  24995. return *this;
  24996. }
  24997. PhysicalDeviceFeatures & setTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 tessellationShader_ ) VULKAN_HPP_NOEXCEPT
  24998. {
  24999. tessellationShader = tessellationShader_;
  25000. return *this;
  25001. }
  25002. PhysicalDeviceFeatures & setSampleRateShading( VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading_ ) VULKAN_HPP_NOEXCEPT
  25003. {
  25004. sampleRateShading = sampleRateShading_;
  25005. return *this;
  25006. }
  25007. PhysicalDeviceFeatures & setDualSrcBlend( VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend_ ) VULKAN_HPP_NOEXCEPT
  25008. {
  25009. dualSrcBlend = dualSrcBlend_;
  25010. return *this;
  25011. }
  25012. PhysicalDeviceFeatures & setLogicOp( VULKAN_HPP_NAMESPACE::Bool32 logicOp_ ) VULKAN_HPP_NOEXCEPT
  25013. {
  25014. logicOp = logicOp_;
  25015. return *this;
  25016. }
  25017. PhysicalDeviceFeatures & setMultiDrawIndirect( VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect_ ) VULKAN_HPP_NOEXCEPT
  25018. {
  25019. multiDrawIndirect = multiDrawIndirect_;
  25020. return *this;
  25021. }
  25022. PhysicalDeviceFeatures & setDrawIndirectFirstInstance( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance_ ) VULKAN_HPP_NOEXCEPT
  25023. {
  25024. drawIndirectFirstInstance = drawIndirectFirstInstance_;
  25025. return *this;
  25026. }
  25027. PhysicalDeviceFeatures & setDepthClamp( VULKAN_HPP_NAMESPACE::Bool32 depthClamp_ ) VULKAN_HPP_NOEXCEPT
  25028. {
  25029. depthClamp = depthClamp_;
  25030. return *this;
  25031. }
  25032. PhysicalDeviceFeatures & setDepthBiasClamp( VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
  25033. {
  25034. depthBiasClamp = depthBiasClamp_;
  25035. return *this;
  25036. }
  25037. PhysicalDeviceFeatures & setFillModeNonSolid( VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid_ ) VULKAN_HPP_NOEXCEPT
  25038. {
  25039. fillModeNonSolid = fillModeNonSolid_;
  25040. return *this;
  25041. }
  25042. PhysicalDeviceFeatures & setDepthBounds( VULKAN_HPP_NAMESPACE::Bool32 depthBounds_ ) VULKAN_HPP_NOEXCEPT
  25043. {
  25044. depthBounds = depthBounds_;
  25045. return *this;
  25046. }
  25047. PhysicalDeviceFeatures & setWideLines( VULKAN_HPP_NAMESPACE::Bool32 wideLines_ ) VULKAN_HPP_NOEXCEPT
  25048. {
  25049. wideLines = wideLines_;
  25050. return *this;
  25051. }
  25052. PhysicalDeviceFeatures & setLargePoints( VULKAN_HPP_NAMESPACE::Bool32 largePoints_ ) VULKAN_HPP_NOEXCEPT
  25053. {
  25054. largePoints = largePoints_;
  25055. return *this;
  25056. }
  25057. PhysicalDeviceFeatures & setAlphaToOne( VULKAN_HPP_NAMESPACE::Bool32 alphaToOne_ ) VULKAN_HPP_NOEXCEPT
  25058. {
  25059. alphaToOne = alphaToOne_;
  25060. return *this;
  25061. }
  25062. PhysicalDeviceFeatures & setMultiViewport( VULKAN_HPP_NAMESPACE::Bool32 multiViewport_ ) VULKAN_HPP_NOEXCEPT
  25063. {
  25064. multiViewport = multiViewport_;
  25065. return *this;
  25066. }
  25067. PhysicalDeviceFeatures & setSamplerAnisotropy( VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy_ ) VULKAN_HPP_NOEXCEPT
  25068. {
  25069. samplerAnisotropy = samplerAnisotropy_;
  25070. return *this;
  25071. }
  25072. PhysicalDeviceFeatures & setTextureCompressionETC2( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2_ ) VULKAN_HPP_NOEXCEPT
  25073. {
  25074. textureCompressionETC2 = textureCompressionETC2_;
  25075. return *this;
  25076. }
  25077. PhysicalDeviceFeatures & setTextureCompressionASTC_LDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR_ ) VULKAN_HPP_NOEXCEPT
  25078. {
  25079. textureCompressionASTC_LDR = textureCompressionASTC_LDR_;
  25080. return *this;
  25081. }
  25082. PhysicalDeviceFeatures & setTextureCompressionBC( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC_ ) VULKAN_HPP_NOEXCEPT
  25083. {
  25084. textureCompressionBC = textureCompressionBC_;
  25085. return *this;
  25086. }
  25087. PhysicalDeviceFeatures & setOcclusionQueryPrecise( VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise_ ) VULKAN_HPP_NOEXCEPT
  25088. {
  25089. occlusionQueryPrecise = occlusionQueryPrecise_;
  25090. return *this;
  25091. }
  25092. PhysicalDeviceFeatures & setPipelineStatisticsQuery( VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery_ ) VULKAN_HPP_NOEXCEPT
  25093. {
  25094. pipelineStatisticsQuery = pipelineStatisticsQuery_;
  25095. return *this;
  25096. }
  25097. PhysicalDeviceFeatures & setVertexPipelineStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
  25098. {
  25099. vertexPipelineStoresAndAtomics = vertexPipelineStoresAndAtomics_;
  25100. return *this;
  25101. }
  25102. PhysicalDeviceFeatures & setFragmentStoresAndAtomics( VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics_ ) VULKAN_HPP_NOEXCEPT
  25103. {
  25104. fragmentStoresAndAtomics = fragmentStoresAndAtomics_;
  25105. return *this;
  25106. }
  25107. PhysicalDeviceFeatures & setShaderTessellationAndGeometryPointSize( VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize_ ) VULKAN_HPP_NOEXCEPT
  25108. {
  25109. shaderTessellationAndGeometryPointSize = shaderTessellationAndGeometryPointSize_;
  25110. return *this;
  25111. }
  25112. PhysicalDeviceFeatures & setShaderImageGatherExtended( VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended_ ) VULKAN_HPP_NOEXCEPT
  25113. {
  25114. shaderImageGatherExtended = shaderImageGatherExtended_;
  25115. return *this;
  25116. }
  25117. PhysicalDeviceFeatures & setShaderStorageImageExtendedFormats( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats_ ) VULKAN_HPP_NOEXCEPT
  25118. {
  25119. shaderStorageImageExtendedFormats = shaderStorageImageExtendedFormats_;
  25120. return *this;
  25121. }
  25122. PhysicalDeviceFeatures & setShaderStorageImageMultisample( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample_ ) VULKAN_HPP_NOEXCEPT
  25123. {
  25124. shaderStorageImageMultisample = shaderStorageImageMultisample_;
  25125. return *this;
  25126. }
  25127. PhysicalDeviceFeatures & setShaderStorageImageReadWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
  25128. {
  25129. shaderStorageImageReadWithoutFormat = shaderStorageImageReadWithoutFormat_;
  25130. return *this;
  25131. }
  25132. PhysicalDeviceFeatures & setShaderStorageImageWriteWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
  25133. {
  25134. shaderStorageImageWriteWithoutFormat = shaderStorageImageWriteWithoutFormat_;
  25135. return *this;
  25136. }
  25137. PhysicalDeviceFeatures & setShaderUniformBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
  25138. {
  25139. shaderUniformBufferArrayDynamicIndexing = shaderUniformBufferArrayDynamicIndexing_;
  25140. return *this;
  25141. }
  25142. PhysicalDeviceFeatures & setShaderSampledImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
  25143. {
  25144. shaderSampledImageArrayDynamicIndexing = shaderSampledImageArrayDynamicIndexing_;
  25145. return *this;
  25146. }
  25147. PhysicalDeviceFeatures & setShaderStorageBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
  25148. {
  25149. shaderStorageBufferArrayDynamicIndexing = shaderStorageBufferArrayDynamicIndexing_;
  25150. return *this;
  25151. }
  25152. PhysicalDeviceFeatures & setShaderStorageImageArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
  25153. {
  25154. shaderStorageImageArrayDynamicIndexing = shaderStorageImageArrayDynamicIndexing_;
  25155. return *this;
  25156. }
  25157. PhysicalDeviceFeatures & setShaderClipDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance_ ) VULKAN_HPP_NOEXCEPT
  25158. {
  25159. shaderClipDistance = shaderClipDistance_;
  25160. return *this;
  25161. }
  25162. PhysicalDeviceFeatures & setShaderCullDistance( VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance_ ) VULKAN_HPP_NOEXCEPT
  25163. {
  25164. shaderCullDistance = shaderCullDistance_;
  25165. return *this;
  25166. }
  25167. PhysicalDeviceFeatures & setShaderFloat64( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64_ ) VULKAN_HPP_NOEXCEPT
  25168. {
  25169. shaderFloat64 = shaderFloat64_;
  25170. return *this;
  25171. }
  25172. PhysicalDeviceFeatures & setShaderInt64( VULKAN_HPP_NAMESPACE::Bool32 shaderInt64_ ) VULKAN_HPP_NOEXCEPT
  25173. {
  25174. shaderInt64 = shaderInt64_;
  25175. return *this;
  25176. }
  25177. PhysicalDeviceFeatures & setShaderInt16( VULKAN_HPP_NAMESPACE::Bool32 shaderInt16_ ) VULKAN_HPP_NOEXCEPT
  25178. {
  25179. shaderInt16 = shaderInt16_;
  25180. return *this;
  25181. }
  25182. PhysicalDeviceFeatures & setShaderResourceResidency( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency_ ) VULKAN_HPP_NOEXCEPT
  25183. {
  25184. shaderResourceResidency = shaderResourceResidency_;
  25185. return *this;
  25186. }
  25187. PhysicalDeviceFeatures & setShaderResourceMinLod( VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod_ ) VULKAN_HPP_NOEXCEPT
  25188. {
  25189. shaderResourceMinLod = shaderResourceMinLod_;
  25190. return *this;
  25191. }
  25192. PhysicalDeviceFeatures & setSparseBinding( VULKAN_HPP_NAMESPACE::Bool32 sparseBinding_ ) VULKAN_HPP_NOEXCEPT
  25193. {
  25194. sparseBinding = sparseBinding_;
  25195. return *this;
  25196. }
  25197. PhysicalDeviceFeatures & setSparseResidencyBuffer( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer_ ) VULKAN_HPP_NOEXCEPT
  25198. {
  25199. sparseResidencyBuffer = sparseResidencyBuffer_;
  25200. return *this;
  25201. }
  25202. PhysicalDeviceFeatures & setSparseResidencyImage2D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D_ ) VULKAN_HPP_NOEXCEPT
  25203. {
  25204. sparseResidencyImage2D = sparseResidencyImage2D_;
  25205. return *this;
  25206. }
  25207. PhysicalDeviceFeatures & setSparseResidencyImage3D( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D_ ) VULKAN_HPP_NOEXCEPT
  25208. {
  25209. sparseResidencyImage3D = sparseResidencyImage3D_;
  25210. return *this;
  25211. }
  25212. PhysicalDeviceFeatures & setSparseResidency2Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples_ ) VULKAN_HPP_NOEXCEPT
  25213. {
  25214. sparseResidency2Samples = sparseResidency2Samples_;
  25215. return *this;
  25216. }
  25217. PhysicalDeviceFeatures & setSparseResidency4Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples_ ) VULKAN_HPP_NOEXCEPT
  25218. {
  25219. sparseResidency4Samples = sparseResidency4Samples_;
  25220. return *this;
  25221. }
  25222. PhysicalDeviceFeatures & setSparseResidency8Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples_ ) VULKAN_HPP_NOEXCEPT
  25223. {
  25224. sparseResidency8Samples = sparseResidency8Samples_;
  25225. return *this;
  25226. }
  25227. PhysicalDeviceFeatures & setSparseResidency16Samples( VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples_ ) VULKAN_HPP_NOEXCEPT
  25228. {
  25229. sparseResidency16Samples = sparseResidency16Samples_;
  25230. return *this;
  25231. }
  25232. PhysicalDeviceFeatures & setSparseResidencyAliased( VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased_ ) VULKAN_HPP_NOEXCEPT
  25233. {
  25234. sparseResidencyAliased = sparseResidencyAliased_;
  25235. return *this;
  25236. }
  25237. PhysicalDeviceFeatures & setVariableMultisampleRate( VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate_ ) VULKAN_HPP_NOEXCEPT
  25238. {
  25239. variableMultisampleRate = variableMultisampleRate_;
  25240. return *this;
  25241. }
  25242. PhysicalDeviceFeatures & setInheritedQueries( VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries_ ) VULKAN_HPP_NOEXCEPT
  25243. {
  25244. inheritedQueries = inheritedQueries_;
  25245. return *this;
  25246. }
  25247. operator VkPhysicalDeviceFeatures const&() const VULKAN_HPP_NOEXCEPT
  25248. {
  25249. return *reinterpret_cast<const VkPhysicalDeviceFeatures*>( this );
  25250. }
  25251. operator VkPhysicalDeviceFeatures &() VULKAN_HPP_NOEXCEPT
  25252. {
  25253. return *reinterpret_cast<VkPhysicalDeviceFeatures*>( this );
  25254. }
  25255. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  25256. auto operator<=>( PhysicalDeviceFeatures const& ) const = default;
  25257. #else
  25258. bool operator==( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  25259. {
  25260. return ( robustBufferAccess == rhs.robustBufferAccess )
  25261. && ( fullDrawIndexUint32 == rhs.fullDrawIndexUint32 )
  25262. && ( imageCubeArray == rhs.imageCubeArray )
  25263. && ( independentBlend == rhs.independentBlend )
  25264. && ( geometryShader == rhs.geometryShader )
  25265. && ( tessellationShader == rhs.tessellationShader )
  25266. && ( sampleRateShading == rhs.sampleRateShading )
  25267. && ( dualSrcBlend == rhs.dualSrcBlend )
  25268. && ( logicOp == rhs.logicOp )
  25269. && ( multiDrawIndirect == rhs.multiDrawIndirect )
  25270. && ( drawIndirectFirstInstance == rhs.drawIndirectFirstInstance )
  25271. && ( depthClamp == rhs.depthClamp )
  25272. && ( depthBiasClamp == rhs.depthBiasClamp )
  25273. && ( fillModeNonSolid == rhs.fillModeNonSolid )
  25274. && ( depthBounds == rhs.depthBounds )
  25275. && ( wideLines == rhs.wideLines )
  25276. && ( largePoints == rhs.largePoints )
  25277. && ( alphaToOne == rhs.alphaToOne )
  25278. && ( multiViewport == rhs.multiViewport )
  25279. && ( samplerAnisotropy == rhs.samplerAnisotropy )
  25280. && ( textureCompressionETC2 == rhs.textureCompressionETC2 )
  25281. && ( textureCompressionASTC_LDR == rhs.textureCompressionASTC_LDR )
  25282. && ( textureCompressionBC == rhs.textureCompressionBC )
  25283. && ( occlusionQueryPrecise == rhs.occlusionQueryPrecise )
  25284. && ( pipelineStatisticsQuery == rhs.pipelineStatisticsQuery )
  25285. && ( vertexPipelineStoresAndAtomics == rhs.vertexPipelineStoresAndAtomics )
  25286. && ( fragmentStoresAndAtomics == rhs.fragmentStoresAndAtomics )
  25287. && ( shaderTessellationAndGeometryPointSize == rhs.shaderTessellationAndGeometryPointSize )
  25288. && ( shaderImageGatherExtended == rhs.shaderImageGatherExtended )
  25289. && ( shaderStorageImageExtendedFormats == rhs.shaderStorageImageExtendedFormats )
  25290. && ( shaderStorageImageMultisample == rhs.shaderStorageImageMultisample )
  25291. && ( shaderStorageImageReadWithoutFormat == rhs.shaderStorageImageReadWithoutFormat )
  25292. && ( shaderStorageImageWriteWithoutFormat == rhs.shaderStorageImageWriteWithoutFormat )
  25293. && ( shaderUniformBufferArrayDynamicIndexing == rhs.shaderUniformBufferArrayDynamicIndexing )
  25294. && ( shaderSampledImageArrayDynamicIndexing == rhs.shaderSampledImageArrayDynamicIndexing )
  25295. && ( shaderStorageBufferArrayDynamicIndexing == rhs.shaderStorageBufferArrayDynamicIndexing )
  25296. && ( shaderStorageImageArrayDynamicIndexing == rhs.shaderStorageImageArrayDynamicIndexing )
  25297. && ( shaderClipDistance == rhs.shaderClipDistance )
  25298. && ( shaderCullDistance == rhs.shaderCullDistance )
  25299. && ( shaderFloat64 == rhs.shaderFloat64 )
  25300. && ( shaderInt64 == rhs.shaderInt64 )
  25301. && ( shaderInt16 == rhs.shaderInt16 )
  25302. && ( shaderResourceResidency == rhs.shaderResourceResidency )
  25303. && ( shaderResourceMinLod == rhs.shaderResourceMinLod )
  25304. && ( sparseBinding == rhs.sparseBinding )
  25305. && ( sparseResidencyBuffer == rhs.sparseResidencyBuffer )
  25306. && ( sparseResidencyImage2D == rhs.sparseResidencyImage2D )
  25307. && ( sparseResidencyImage3D == rhs.sparseResidencyImage3D )
  25308. && ( sparseResidency2Samples == rhs.sparseResidency2Samples )
  25309. && ( sparseResidency4Samples == rhs.sparseResidency4Samples )
  25310. && ( sparseResidency8Samples == rhs.sparseResidency8Samples )
  25311. && ( sparseResidency16Samples == rhs.sparseResidency16Samples )
  25312. && ( sparseResidencyAliased == rhs.sparseResidencyAliased )
  25313. && ( variableMultisampleRate == rhs.variableMultisampleRate )
  25314. && ( inheritedQueries == rhs.inheritedQueries );
  25315. }
  25316. bool operator!=( PhysicalDeviceFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  25317. {
  25318. return !operator==( rhs );
  25319. }
  25320. #endif
  25321. public:
  25322. VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess = {};
  25323. VULKAN_HPP_NAMESPACE::Bool32 fullDrawIndexUint32 = {};
  25324. VULKAN_HPP_NAMESPACE::Bool32 imageCubeArray = {};
  25325. VULKAN_HPP_NAMESPACE::Bool32 independentBlend = {};
  25326. VULKAN_HPP_NAMESPACE::Bool32 geometryShader = {};
  25327. VULKAN_HPP_NAMESPACE::Bool32 tessellationShader = {};
  25328. VULKAN_HPP_NAMESPACE::Bool32 sampleRateShading = {};
  25329. VULKAN_HPP_NAMESPACE::Bool32 dualSrcBlend = {};
  25330. VULKAN_HPP_NAMESPACE::Bool32 logicOp = {};
  25331. VULKAN_HPP_NAMESPACE::Bool32 multiDrawIndirect = {};
  25332. VULKAN_HPP_NAMESPACE::Bool32 drawIndirectFirstInstance = {};
  25333. VULKAN_HPP_NAMESPACE::Bool32 depthClamp = {};
  25334. VULKAN_HPP_NAMESPACE::Bool32 depthBiasClamp = {};
  25335. VULKAN_HPP_NAMESPACE::Bool32 fillModeNonSolid = {};
  25336. VULKAN_HPP_NAMESPACE::Bool32 depthBounds = {};
  25337. VULKAN_HPP_NAMESPACE::Bool32 wideLines = {};
  25338. VULKAN_HPP_NAMESPACE::Bool32 largePoints = {};
  25339. VULKAN_HPP_NAMESPACE::Bool32 alphaToOne = {};
  25340. VULKAN_HPP_NAMESPACE::Bool32 multiViewport = {};
  25341. VULKAN_HPP_NAMESPACE::Bool32 samplerAnisotropy = {};
  25342. VULKAN_HPP_NAMESPACE::Bool32 textureCompressionETC2 = {};
  25343. VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_LDR = {};
  25344. VULKAN_HPP_NAMESPACE::Bool32 textureCompressionBC = {};
  25345. VULKAN_HPP_NAMESPACE::Bool32 occlusionQueryPrecise = {};
  25346. VULKAN_HPP_NAMESPACE::Bool32 pipelineStatisticsQuery = {};
  25347. VULKAN_HPP_NAMESPACE::Bool32 vertexPipelineStoresAndAtomics = {};
  25348. VULKAN_HPP_NAMESPACE::Bool32 fragmentStoresAndAtomics = {};
  25349. VULKAN_HPP_NAMESPACE::Bool32 shaderTessellationAndGeometryPointSize = {};
  25350. VULKAN_HPP_NAMESPACE::Bool32 shaderImageGatherExtended = {};
  25351. VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageExtendedFormats = {};
  25352. VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageMultisample = {};
  25353. VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageReadWithoutFormat = {};
  25354. VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageWriteWithoutFormat = {};
  25355. VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayDynamicIndexing = {};
  25356. VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayDynamicIndexing = {};
  25357. VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayDynamicIndexing = {};
  25358. VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayDynamicIndexing = {};
  25359. VULKAN_HPP_NAMESPACE::Bool32 shaderClipDistance = {};
  25360. VULKAN_HPP_NAMESPACE::Bool32 shaderCullDistance = {};
  25361. VULKAN_HPP_NAMESPACE::Bool32 shaderFloat64 = {};
  25362. VULKAN_HPP_NAMESPACE::Bool32 shaderInt64 = {};
  25363. VULKAN_HPP_NAMESPACE::Bool32 shaderInt16 = {};
  25364. VULKAN_HPP_NAMESPACE::Bool32 shaderResourceResidency = {};
  25365. VULKAN_HPP_NAMESPACE::Bool32 shaderResourceMinLod = {};
  25366. VULKAN_HPP_NAMESPACE::Bool32 sparseBinding = {};
  25367. VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyBuffer = {};
  25368. VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage2D = {};
  25369. VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyImage3D = {};
  25370. VULKAN_HPP_NAMESPACE::Bool32 sparseResidency2Samples = {};
  25371. VULKAN_HPP_NAMESPACE::Bool32 sparseResidency4Samples = {};
  25372. VULKAN_HPP_NAMESPACE::Bool32 sparseResidency8Samples = {};
  25373. VULKAN_HPP_NAMESPACE::Bool32 sparseResidency16Samples = {};
  25374. VULKAN_HPP_NAMESPACE::Bool32 sparseResidencyAliased = {};
  25375. VULKAN_HPP_NAMESPACE::Bool32 variableMultisampleRate = {};
  25376. VULKAN_HPP_NAMESPACE::Bool32 inheritedQueries = {};
  25377. };
  25378. static_assert( sizeof( PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ), "struct and wrapper have different size!" );
  25379. static_assert( std::is_standard_layout<PhysicalDeviceFeatures>::value, "struct wrapper is not a standard layout!" );
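// Illustrative usage sketch (not part of the generated header): querying the supported
// core features and enabling only the ones an application needs. Assumes a valid
// vk::PhysicalDevice `physicalDevice` and the enhanced-mode getFeatures() wrapper declared
// elsewhere in this header; `vk` is the default VULKAN_HPP_NAMESPACE.
//
//   vk::PhysicalDeviceFeatures supported = physicalDevice.getFeatures();
//   vk::PhysicalDeviceFeatures enabled;                       // every member defaults to VK_FALSE
//   enabled.samplerAnisotropy = supported.samplerAnisotropy;  // enable only if supported
//   enabled.fillModeNonSolid  = supported.fillModeNonSolid;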
  25380. struct DeviceCreateInfo
  25381. {
  25382. static const bool allowDuplicate = false;
  25383. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceCreateInfo;
  25384. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  25385. VULKAN_HPP_CONSTEXPR DeviceCreateInfo(VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ = {}, uint32_t queueCreateInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ = {}, uint32_t enabledLayerCount_ = {}, const char* const * ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char* const * ppEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = {}) VULKAN_HPP_NOEXCEPT
  25386. : flags( flags_ ), queueCreateInfoCount( queueCreateInfoCount_ ), pQueueCreateInfos( pQueueCreateInfos_ ), enabledLayerCount( enabledLayerCount_ ), ppEnabledLayerNames( ppEnabledLayerNames_ ), enabledExtensionCount( enabledExtensionCount_ ), ppEnabledExtensionNames( ppEnabledExtensionNames_ ), pEnabledFeatures( pEnabledFeatures_ )
  25387. {}
  25388. VULKAN_HPP_CONSTEXPR DeviceCreateInfo( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  25389. DeviceCreateInfo( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  25390. : DeviceCreateInfo( *reinterpret_cast<DeviceCreateInfo const *>( &rhs ) )
  25391. {}
  25392. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  25393. DeviceCreateInfo( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledLayerNames_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledExtensionNames_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ = {} )
  25394. : flags( flags_ ), queueCreateInfoCount( static_cast<uint32_t>( queueCreateInfos_.size() ) ), pQueueCreateInfos( queueCreateInfos_.data() ), enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) ), ppEnabledLayerNames( pEnabledLayerNames_.data() ), enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) ), ppEnabledExtensionNames( pEnabledExtensionNames_.data() ), pEnabledFeatures( pEnabledFeatures_ )
  25395. {}
  25396. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  25397. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  25398. VULKAN_HPP_CONSTEXPR_14 DeviceCreateInfo & operator=( DeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  25399. DeviceCreateInfo & operator=( VkDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  25400. {
  25401. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceCreateInfo const *>( &rhs );
  25402. return *this;
  25403. }
  25404. DeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  25405. {
  25406. pNext = pNext_;
  25407. return *this;
  25408. }
  25409. DeviceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  25410. {
  25411. flags = flags_;
  25412. return *this;
  25413. }
  25414. DeviceCreateInfo & setQueueCreateInfoCount( uint32_t queueCreateInfoCount_ ) VULKAN_HPP_NOEXCEPT
  25415. {
  25416. queueCreateInfoCount = queueCreateInfoCount_;
  25417. return *this;
  25418. }
  25419. DeviceCreateInfo & setPQueueCreateInfos( const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
  25420. {
  25421. pQueueCreateInfos = pQueueCreateInfos_;
  25422. return *this;
  25423. }
  25424. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  25425. DeviceCreateInfo & setQueueCreateInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo> const & queueCreateInfos_ ) VULKAN_HPP_NOEXCEPT
  25426. {
  25427. queueCreateInfoCount = static_cast<uint32_t>( queueCreateInfos_.size() );
  25428. pQueueCreateInfos = queueCreateInfos_.data();
  25429. return *this;
  25430. }
  25431. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  25432. DeviceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
  25433. {
  25434. enabledLayerCount = enabledLayerCount_;
  25435. return *this;
  25436. }
  25437. DeviceCreateInfo & setPpEnabledLayerNames( const char* const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
  25438. {
  25439. ppEnabledLayerNames = ppEnabledLayerNames_;
  25440. return *this;
  25441. }
  25442. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  25443. DeviceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
  25444. {
  25445. enabledLayerCount = static_cast<uint32_t>( pEnabledLayerNames_.size() );
  25446. ppEnabledLayerNames = pEnabledLayerNames_.data();
  25447. return *this;
  25448. }
  25449. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  25450. DeviceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
  25451. {
  25452. enabledExtensionCount = enabledExtensionCount_;
  25453. return *this;
  25454. }
  25455. DeviceCreateInfo & setPpEnabledExtensionNames( const char* const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
  25456. {
  25457. ppEnabledExtensionNames = ppEnabledExtensionNames_;
  25458. return *this;
  25459. }
  25460. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  25461. DeviceCreateInfo & setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
  25462. {
  25463. enabledExtensionCount = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
  25464. ppEnabledExtensionNames = pEnabledExtensionNames_.data();
  25465. return *this;
  25466. }
  25467. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  25468. DeviceCreateInfo & setPEnabledFeatures( const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures_ ) VULKAN_HPP_NOEXCEPT
  25469. {
  25470. pEnabledFeatures = pEnabledFeatures_;
  25471. return *this;
  25472. }
  25473. operator VkDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  25474. {
  25475. return *reinterpret_cast<const VkDeviceCreateInfo*>( this );
  25476. }
  25477. operator VkDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
  25478. {
  25479. return *reinterpret_cast<VkDeviceCreateInfo*>( this );
  25480. }
  25481. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  25482. auto operator<=>( DeviceCreateInfo const& ) const = default;
  25483. #else
  25484. bool operator==( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  25485. {
  25486. return ( sType == rhs.sType )
  25487. && ( pNext == rhs.pNext )
  25488. && ( flags == rhs.flags )
  25489. && ( queueCreateInfoCount == rhs.queueCreateInfoCount )
  25490. && ( pQueueCreateInfos == rhs.pQueueCreateInfos )
  25491. && ( enabledLayerCount == rhs.enabledLayerCount )
  25492. && ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
  25493. && ( enabledExtensionCount == rhs.enabledExtensionCount )
  25494. && ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames )
  25495. && ( pEnabledFeatures == rhs.pEnabledFeatures );
  25496. }
  25497. bool operator!=( DeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  25498. {
  25499. return !operator==( rhs );
  25500. }
  25501. #endif
  25502. public:
  25503. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceCreateInfo;
  25504. const void* pNext = {};
  25505. VULKAN_HPP_NAMESPACE::DeviceCreateFlags flags = {};
  25506. uint32_t queueCreateInfoCount = {};
  25507. const VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo* pQueueCreateInfos = {};
  25508. uint32_t enabledLayerCount = {};
  25509. const char* const * ppEnabledLayerNames = {};
  25510. uint32_t enabledExtensionCount = {};
  25511. const char* const * ppEnabledExtensionNames = {};
  25512. const VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pEnabledFeatures = {};
  25513. };
  25514. static_assert( sizeof( DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
  25515. static_assert( std::is_standard_layout<DeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
  25516. template <>
  25517. struct CppType<StructureType, StructureType::eDeviceCreateInfo>
  25518. {
  25519. using Type = DeviceCreateInfo;
  25520. };
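// Illustrative usage sketch (not part of the generated header): creating a logical device
// with a single queue and an optional feature set, using the ArrayProxyNoTemporaries
// constructor of DeviceCreateInfo shown above. Assumes a valid vk::PhysicalDevice
// `physicalDevice`, a uint32_t `queueFamilyIndex`, and enhanced mode with exceptions enabled.
//
//   float queuePriority = 1.0f;
//   vk::DeviceQueueCreateInfo queueInfo( {}, queueFamilyIndex, 1, &queuePriority );
//   vk::PhysicalDeviceFeatures enabledFeatures;
//   enabledFeatures.samplerAnisotropy = VK_TRUE;   // request only what the device supports
//   vk::DeviceCreateInfo deviceCreateInfo( {}, queueInfo, {}, {}, &enabledFeatures );
//   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );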
  25521. struct DeviceDeviceMemoryReportCreateInfoEXT
  25522. {
  25523. static const bool allowDuplicate = true;
  25524. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
  25525. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  25526. VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT(VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ = {}, void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT
  25527. : flags( flags_ ), pfnUserCallback( pfnUserCallback_ ), pUserData( pUserData_ )
  25528. {}
  25529. VULKAN_HPP_CONSTEXPR DeviceDeviceMemoryReportCreateInfoEXT( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  25530. DeviceDeviceMemoryReportCreateInfoEXT( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  25531. : DeviceDeviceMemoryReportCreateInfoEXT( *reinterpret_cast<DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs ) )
  25532. {}
  25533. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  25534. VULKAN_HPP_CONSTEXPR_14 DeviceDeviceMemoryReportCreateInfoEXT & operator=( DeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  25535. DeviceDeviceMemoryReportCreateInfoEXT & operator=( VkDeviceDeviceMemoryReportCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  25536. {
  25537. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT const *>( &rhs );
  25538. return *this;
  25539. }
  25540. DeviceDeviceMemoryReportCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  25541. {
  25542. pNext = pNext_;
  25543. return *this;
  25544. }
  25545. DeviceDeviceMemoryReportCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
  25546. {
  25547. flags = flags_;
  25548. return *this;
  25549. }
  25550. DeviceDeviceMemoryReportCreateInfoEXT & setPfnUserCallback( PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback_ ) VULKAN_HPP_NOEXCEPT
  25551. {
  25552. pfnUserCallback = pfnUserCallback_;
  25553. return *this;
  25554. }
  25555. DeviceDeviceMemoryReportCreateInfoEXT & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
  25556. {
  25557. pUserData = pUserData_;
  25558. return *this;
  25559. }
  25560. operator VkDeviceDeviceMemoryReportCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  25561. {
  25562. return *reinterpret_cast<const VkDeviceDeviceMemoryReportCreateInfoEXT*>( this );
  25563. }
  25564. operator VkDeviceDeviceMemoryReportCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  25565. {
  25566. return *reinterpret_cast<VkDeviceDeviceMemoryReportCreateInfoEXT*>( this );
  25567. }
  25568. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  25569. auto operator<=>( DeviceDeviceMemoryReportCreateInfoEXT const& ) const = default;
  25570. #else
  25571. bool operator==( DeviceDeviceMemoryReportCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  25572. {
  25573. return ( sType == rhs.sType )
  25574. && ( pNext == rhs.pNext )
  25575. && ( flags == rhs.flags )
  25576. && ( pfnUserCallback == rhs.pfnUserCallback )
  25577. && ( pUserData == rhs.pUserData );
  25578. }
  25579. bool operator!=( DeviceDeviceMemoryReportCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  25580. {
  25581. return !operator==( rhs );
  25582. }
  25583. #endif
  25584. public:
  25585. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDeviceMemoryReportCreateInfoEXT;
  25586. const void* pNext = {};
  25587. VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {};
  25588. PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback = {};
  25589. void* pUserData = {};
  25590. };
  25591. static_assert( sizeof( DeviceDeviceMemoryReportCreateInfoEXT ) == sizeof( VkDeviceDeviceMemoryReportCreateInfoEXT ), "struct and wrapper have different size!" );
  25592. static_assert( std::is_standard_layout<DeviceDeviceMemoryReportCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  25593. template <>
  25594. struct CppType<StructureType, StructureType::eDeviceDeviceMemoryReportCreateInfoEXT>
  25595. {
  25596. using Type = DeviceDeviceMemoryReportCreateInfoEXT;
  25597. };
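// Illustrative usage sketch (not part of the generated header): registering a device memory
// report callback (VK_EXT_device_memory_report) by chaining this structure into
// DeviceCreateInfo::pNext before the device is created. `myMemoryReportCallback` is a
// placeholder for a user-provided PFN_vkDeviceMemoryReportCallbackEXT and `myUserData`
// stands in for arbitrary per-application state.
//
//   vk::DeviceDeviceMemoryReportCreateInfoEXT memoryReportInfo( {}, &myMemoryReportCallback, &myUserData );
//   deviceCreateInfo.setPNext( &memoryReportInfo );   // deviceCreateInfo is a vk::DeviceCreateInfo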
  25598. struct DeviceDiagnosticsConfigCreateInfoNV
  25599. {
  25600. static const bool allowDuplicate = false;
  25601. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
  25602. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  25603. VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV(VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ = {}) VULKAN_HPP_NOEXCEPT
  25604. : flags( flags_ )
  25605. {}
  25606. VULKAN_HPP_CONSTEXPR DeviceDiagnosticsConfigCreateInfoNV( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  25607. DeviceDiagnosticsConfigCreateInfoNV( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  25608. : DeviceDiagnosticsConfigCreateInfoNV( *reinterpret_cast<DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs ) )
  25609. {}
  25610. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  25611. VULKAN_HPP_CONSTEXPR_14 DeviceDiagnosticsConfigCreateInfoNV & operator=( DeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  25612. DeviceDiagnosticsConfigCreateInfoNV & operator=( VkDeviceDiagnosticsConfigCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  25613. {
  25614. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV const *>( &rhs );
  25615. return *this;
  25616. }
  25617. DeviceDiagnosticsConfigCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  25618. {
  25619. pNext = pNext_;
  25620. return *this;
  25621. }
  25622. DeviceDiagnosticsConfigCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
  25623. {
  25624. flags = flags_;
  25625. return *this;
  25626. }
  25627. operator VkDeviceDiagnosticsConfigCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
  25628. {
  25629. return *reinterpret_cast<const VkDeviceDiagnosticsConfigCreateInfoNV*>( this );
  25630. }
  25631. operator VkDeviceDiagnosticsConfigCreateInfoNV &() VULKAN_HPP_NOEXCEPT
  25632. {
  25633. return *reinterpret_cast<VkDeviceDiagnosticsConfigCreateInfoNV*>( this );
  25634. }
  25635. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  25636. auto operator<=>( DeviceDiagnosticsConfigCreateInfoNV const& ) const = default;
  25637. #else
  25638. bool operator==( DeviceDiagnosticsConfigCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  25639. {
  25640. return ( sType == rhs.sType )
  25641. && ( pNext == rhs.pNext )
  25642. && ( flags == rhs.flags );
  25643. }
  25644. bool operator!=( DeviceDiagnosticsConfigCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  25645. {
  25646. return !operator==( rhs );
  25647. }
  25648. #endif
  25649. public:
  25650. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceDiagnosticsConfigCreateInfoNV;
  25651. const void* pNext = {};
  25652. VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV flags = {};
  25653. };
  25654. static_assert( sizeof( DeviceDiagnosticsConfigCreateInfoNV ) == sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ), "struct and wrapper have different size!" );
  25655. static_assert( std::is_standard_layout<DeviceDiagnosticsConfigCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  25656. template <>
  25657. struct CppType<StructureType, StructureType::eDeviceDiagnosticsConfigCreateInfoNV>
  25658. {
  25659. using Type = DeviceDiagnosticsConfigCreateInfoNV;
  25660. };
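// Illustrative usage sketch (not part of the generated header): enabling NVIDIA device
// diagnostics (VK_NV_device_diagnostics_config) by chaining this structure into
// DeviceCreateInfo::pNext. The flag bit names are assumed from the extension's
// DeviceDiagnosticsConfigFlagBitsNV enumeration.
//
//   vk::DeviceDiagnosticsConfigCreateInfoNV diagnosticsInfo(
//     vk::DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking |
//     vk::DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints );
//   deviceCreateInfo.setPNext( &diagnosticsInfo );   // deviceCreateInfo is a vk::DeviceCreateInfo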
  25661. struct DeviceEventInfoEXT
  25662. {
  25663. static const bool allowDuplicate = false;
  25664. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceEventInfoEXT;
  25665. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  25666. VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT(VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug) VULKAN_HPP_NOEXCEPT
  25667. : deviceEvent( deviceEvent_ )
  25668. {}
  25669. VULKAN_HPP_CONSTEXPR DeviceEventInfoEXT( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  25670. DeviceEventInfoEXT( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  25671. : DeviceEventInfoEXT( *reinterpret_cast<DeviceEventInfoEXT const *>( &rhs ) )
  25672. {}
  25673. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  25674. VULKAN_HPP_CONSTEXPR_14 DeviceEventInfoEXT & operator=( DeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  25675. DeviceEventInfoEXT & operator=( VkDeviceEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  25676. {
  25677. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT const *>( &rhs );
  25678. return *this;
  25679. }
  25680. DeviceEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  25681. {
  25682. pNext = pNext_;
  25683. return *this;
  25684. }
  25685. DeviceEventInfoEXT & setDeviceEvent( VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent_ ) VULKAN_HPP_NOEXCEPT
  25686. {
  25687. deviceEvent = deviceEvent_;
  25688. return *this;
  25689. }
  25690. operator VkDeviceEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  25691. {
  25692. return *reinterpret_cast<const VkDeviceEventInfoEXT*>( this );
  25693. }
  25694. operator VkDeviceEventInfoEXT &() VULKAN_HPP_NOEXCEPT
  25695. {
  25696. return *reinterpret_cast<VkDeviceEventInfoEXT*>( this );
  25697. }
  25698. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  25699. auto operator<=>( DeviceEventInfoEXT const& ) const = default;
  25700. #else
  25701. bool operator==( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  25702. {
  25703. return ( sType == rhs.sType )
  25704. && ( pNext == rhs.pNext )
  25705. && ( deviceEvent == rhs.deviceEvent );
  25706. }
  25707. bool operator!=( DeviceEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  25708. {
  25709. return !operator==( rhs );
  25710. }
  25711. #endif
  25712. public:
  25713. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceEventInfoEXT;
  25714. const void* pNext = {};
  25715. VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT deviceEvent = VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT::eDisplayHotplug;
  25716. };
  25717. static_assert( sizeof( DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" );
  25718. static_assert( std::is_standard_layout<DeviceEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
  25719. template <>
  25720. struct CppType<StructureType, StructureType::eDeviceEventInfoEXT>
  25721. {
  25722. using Type = DeviceEventInfoEXT;
  25723. };
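// Illustrative usage sketch (not part of the generated header): creating a fence that is
// signalled when a display is hot-plugged (VK_EXT_display_control). Assumes a valid
// vk::Device `device` and the enhanced-mode registerEventEXT wrapper declared elsewhere in
// this header.
//
//   vk::DeviceEventInfoEXT eventInfo( vk::DeviceEventTypeEXT::eDisplayHotplug );
//   vk::Fence hotplugFence = device.registerEventEXT( eventInfo );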
  25724. struct DeviceGroupBindSparseInfo
  25725. {
  25726. static const bool allowDuplicate = false;
  25727. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupBindSparseInfo;
  25728. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  25729. VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo(uint32_t resourceDeviceIndex_ = {}, uint32_t memoryDeviceIndex_ = {}) VULKAN_HPP_NOEXCEPT
  25730. : resourceDeviceIndex( resourceDeviceIndex_ ), memoryDeviceIndex( memoryDeviceIndex_ )
  25731. {}
  25732. VULKAN_HPP_CONSTEXPR DeviceGroupBindSparseInfo( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  25733. DeviceGroupBindSparseInfo( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  25734. : DeviceGroupBindSparseInfo( *reinterpret_cast<DeviceGroupBindSparseInfo const *>( &rhs ) )
  25735. {}
  25736. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  25737. VULKAN_HPP_CONSTEXPR_14 DeviceGroupBindSparseInfo & operator=( DeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  25738. DeviceGroupBindSparseInfo & operator=( VkDeviceGroupBindSparseInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  25739. {
  25740. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo const *>( &rhs );
  25741. return *this;
  25742. }
  25743. DeviceGroupBindSparseInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  25744. {
  25745. pNext = pNext_;
  25746. return *this;
  25747. }
  25748. DeviceGroupBindSparseInfo & setResourceDeviceIndex( uint32_t resourceDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
  25749. {
  25750. resourceDeviceIndex = resourceDeviceIndex_;
  25751. return *this;
  25752. }
  25753. DeviceGroupBindSparseInfo & setMemoryDeviceIndex( uint32_t memoryDeviceIndex_ ) VULKAN_HPP_NOEXCEPT
  25754. {
  25755. memoryDeviceIndex = memoryDeviceIndex_;
  25756. return *this;
  25757. }
  25758. operator VkDeviceGroupBindSparseInfo const&() const VULKAN_HPP_NOEXCEPT
  25759. {
  25760. return *reinterpret_cast<const VkDeviceGroupBindSparseInfo*>( this );
  25761. }
  25762. operator VkDeviceGroupBindSparseInfo &() VULKAN_HPP_NOEXCEPT
  25763. {
  25764. return *reinterpret_cast<VkDeviceGroupBindSparseInfo*>( this );
  25765. }
  25766. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  25767. auto operator<=>( DeviceGroupBindSparseInfo const& ) const = default;
  25768. #else
  25769. bool operator==( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  25770. {
  25771. return ( sType == rhs.sType )
  25772. && ( pNext == rhs.pNext )
  25773. && ( resourceDeviceIndex == rhs.resourceDeviceIndex )
  25774. && ( memoryDeviceIndex == rhs.memoryDeviceIndex );
  25775. }
  25776. bool operator!=( DeviceGroupBindSparseInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  25777. {
  25778. return !operator==( rhs );
  25779. }
  25780. #endif
  25781. public:
  25782. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupBindSparseInfo;
  25783. const void* pNext = {};
  25784. uint32_t resourceDeviceIndex = {};
  25785. uint32_t memoryDeviceIndex = {};
  25786. };
  25787. static_assert( sizeof( DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ), "struct and wrapper have different size!" );
  25788. static_assert( std::is_standard_layout<DeviceGroupBindSparseInfo>::value, "struct wrapper is not a standard layout!" );
  25789. template <>
  25790. struct CppType<StructureType, StructureType::eDeviceGroupBindSparseInfo>
  25791. {
  25792. using Type = DeviceGroupBindSparseInfo;
  25793. };
  25794. using DeviceGroupBindSparseInfoKHR = DeviceGroupBindSparseInfo;
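// Illustrative usage sketch (not part of the generated header): directing a sparse binding
// operation at specific devices of a device group by chaining this structure into
// BindSparseInfo::pNext; both indices refer to physical devices within the group.
//
//   vk::DeviceGroupBindSparseInfo deviceGroupBindInfo( 0 /* resourceDeviceIndex */, 0 /* memoryDeviceIndex */ );
//   bindSparseInfo.setPNext( &deviceGroupBindInfo );   // bindSparseInfo is a vk::BindSparseInfo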
  25795. struct DeviceGroupCommandBufferBeginInfo
  25796. {
  25797. static const bool allowDuplicate = false;
  25798. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupCommandBufferBeginInfo;
  25799. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  25800. VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo(uint32_t deviceMask_ = {}) VULKAN_HPP_NOEXCEPT
  25801. : deviceMask( deviceMask_ )
  25802. {}
  25803. VULKAN_HPP_CONSTEXPR DeviceGroupCommandBufferBeginInfo( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  25804. DeviceGroupCommandBufferBeginInfo( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  25805. : DeviceGroupCommandBufferBeginInfo( *reinterpret_cast<DeviceGroupCommandBufferBeginInfo const *>( &rhs ) )
  25806. {}
  25807. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  25808. VULKAN_HPP_CONSTEXPR_14 DeviceGroupCommandBufferBeginInfo & operator=( DeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  25809. DeviceGroupCommandBufferBeginInfo & operator=( VkDeviceGroupCommandBufferBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  25810. {
  25811. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo const *>( &rhs );
  25812. return *this;
  25813. }
  25814. DeviceGroupCommandBufferBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  25815. {
  25816. pNext = pNext_;
  25817. return *this;
  25818. }
  25819. DeviceGroupCommandBufferBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
  25820. {
  25821. deviceMask = deviceMask_;
  25822. return *this;
  25823. }
  25824. operator VkDeviceGroupCommandBufferBeginInfo const&() const VULKAN_HPP_NOEXCEPT
  25825. {
  25826. return *reinterpret_cast<const VkDeviceGroupCommandBufferBeginInfo*>( this );
  25827. }
  25828. operator VkDeviceGroupCommandBufferBeginInfo &() VULKAN_HPP_NOEXCEPT
  25829. {
  25830. return *reinterpret_cast<VkDeviceGroupCommandBufferBeginInfo*>( this );
  25831. }
  25832. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  25833. auto operator<=>( DeviceGroupCommandBufferBeginInfo const& ) const = default;
  25834. #else
  25835. bool operator==( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  25836. {
  25837. return ( sType == rhs.sType )
  25838. && ( pNext == rhs.pNext )
  25839. && ( deviceMask == rhs.deviceMask );
  25840. }
  25841. bool operator!=( DeviceGroupCommandBufferBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  25842. {
  25843. return !operator==( rhs );
  25844. }
  25845. #endif
  25846. public:
  25847. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupCommandBufferBeginInfo;
  25848. const void* pNext = {};
  25849. uint32_t deviceMask = {};
  25850. };
  25851. static_assert( sizeof( DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ), "struct and wrapper have different size!" );
  25852. static_assert( std::is_standard_layout<DeviceGroupCommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
  25853. template <>
  25854. struct CppType<StructureType, StructureType::eDeviceGroupCommandBufferBeginInfo>
  25855. {
  25856. using Type = DeviceGroupCommandBufferBeginInfo;
  25857. };
  25858. using DeviceGroupCommandBufferBeginInfoKHR = DeviceGroupCommandBufferBeginInfo;
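// Illustrative usage sketch (not part of the generated header): restricting the commands of a
// command buffer to device index 0 of a device group by chaining this structure into
// CommandBufferBeginInfo::pNext.
//
//   vk::DeviceGroupCommandBufferBeginInfo deviceGroupBeginInfo( 1u << 0 );   // deviceMask
//   commandBufferBeginInfo.setPNext( &deviceGroupBeginInfo );   // a vk::CommandBufferBeginInfo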
  25859. class DisplayKHR
  25860. {
  25861. public:
  25862. using CType = VkDisplayKHR;
  25863. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR;
  25864. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR;
  25865. public:
  25866. VULKAN_HPP_CONSTEXPR DisplayKHR() VULKAN_HPP_NOEXCEPT
  25867. : m_displayKHR(VK_NULL_HANDLE)
  25868. {}
  25869. VULKAN_HPP_CONSTEXPR DisplayKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  25870. : m_displayKHR(VK_NULL_HANDLE)
  25871. {}
  25872. VULKAN_HPP_TYPESAFE_EXPLICIT DisplayKHR( VkDisplayKHR displayKHR ) VULKAN_HPP_NOEXCEPT
  25873. : m_displayKHR( displayKHR )
  25874. {}
  25875. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  25876. DisplayKHR & operator=(VkDisplayKHR displayKHR) VULKAN_HPP_NOEXCEPT
  25877. {
  25878. m_displayKHR = displayKHR;
  25879. return *this;
  25880. }
  25881. #endif
  25882. DisplayKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  25883. {
  25884. m_displayKHR = VK_NULL_HANDLE;
  25885. return *this;
  25886. }
  25887. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  25888. auto operator<=>( DisplayKHR const& ) const = default;
  25889. #else
  25890. bool operator==( DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
  25891. {
  25892. return m_displayKHR == rhs.m_displayKHR;
  25893. }
  25894. bool operator!=(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
  25895. {
  25896. return m_displayKHR != rhs.m_displayKHR;
  25897. }
  25898. bool operator<(DisplayKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
  25899. {
  25900. return m_displayKHR < rhs.m_displayKHR;
  25901. }
  25902. #endif
  25903. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayKHR() const VULKAN_HPP_NOEXCEPT
  25904. {
  25905. return m_displayKHR;
  25906. }
  25907. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  25908. {
  25909. return m_displayKHR != VK_NULL_HANDLE;
  25910. }
  25911. bool operator!() const VULKAN_HPP_NOEXCEPT
  25912. {
  25913. return m_displayKHR == VK_NULL_HANDLE;
  25914. }
  25915. private:
  25916. VkDisplayKHR m_displayKHR;
  25917. };
  25918. static_assert( sizeof( VULKAN_HPP_NAMESPACE::DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" );
  25919. template <>
  25920. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDisplayKHR>
  25921. {
  25922. using type = VULKAN_HPP_NAMESPACE::DisplayKHR;
  25923. };
  25924. template <>
  25925. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDisplayKHR>
  25926. {
  25927. using Type = VULKAN_HPP_NAMESPACE::DisplayKHR;
  25928. };
  25929. template <>
  25930. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayKHR>
  25931. {
  25932. using Type = VULKAN_HPP_NAMESPACE::DisplayKHR;
  25933. };
  25934. template <>
  25935. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DisplayKHR>
  25936. {
  25937. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  25938. };
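// Illustrative usage sketch (not part of the generated header): obtaining DisplayKHR handles
// through VK_KHR_display. Assumes a valid vk::PhysicalDevice `physicalDevice` and the
// enhanced-mode getDisplayPropertiesKHR wrapper declared elsewhere in this header.
//
//   std::vector<vk::DisplayPropertiesKHR> displayProperties = physicalDevice.getDisplayPropertiesKHR();
//   if ( !displayProperties.empty() )
//   {
//     vk::DisplayKHR display = displayProperties.front().display;   // non-owning handle wrapper
//   }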
  25939. struct PerformanceConfigurationAcquireInfoINTEL
  25940. {
  25941. static const bool allowDuplicate = false;
  25942. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
  25943. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  25944. VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated) VULKAN_HPP_NOEXCEPT
  25945. : type( type_ )
  25946. {}
  25947. VULKAN_HPP_CONSTEXPR PerformanceConfigurationAcquireInfoINTEL( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  25948. PerformanceConfigurationAcquireInfoINTEL( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
  25949. : PerformanceConfigurationAcquireInfoINTEL( *reinterpret_cast<PerformanceConfigurationAcquireInfoINTEL const *>( &rhs ) )
  25950. {}
  25951. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  25952. VULKAN_HPP_CONSTEXPR_14 PerformanceConfigurationAcquireInfoINTEL & operator=( PerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  25953. PerformanceConfigurationAcquireInfoINTEL & operator=( VkPerformanceConfigurationAcquireInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
  25954. {
  25955. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL const *>( &rhs );
  25956. return *this;
  25957. }
  25958. PerformanceConfigurationAcquireInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  25959. {
  25960. pNext = pNext_;
  25961. return *this;
  25962. }
  25963. PerformanceConfigurationAcquireInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
  25964. {
  25965. type = type_;
  25966. return *this;
  25967. }
  25968. operator VkPerformanceConfigurationAcquireInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
  25969. {
  25970. return *reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL*>( this );
  25971. }
  25972. operator VkPerformanceConfigurationAcquireInfoINTEL &() VULKAN_HPP_NOEXCEPT
  25973. {
  25974. return *reinterpret_cast<VkPerformanceConfigurationAcquireInfoINTEL*>( this );
  25975. }
  25976. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  25977. auto operator<=>( PerformanceConfigurationAcquireInfoINTEL const& ) const = default;
  25978. #else
  25979. bool operator==( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
  25980. {
  25981. return ( sType == rhs.sType )
  25982. && ( pNext == rhs.pNext )
  25983. && ( type == rhs.type );
  25984. }
  25985. bool operator!=( PerformanceConfigurationAcquireInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
  25986. {
  25987. return !operator==( rhs );
  25988. }
  25989. #endif
  25990. public:
  25991. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceConfigurationAcquireInfoINTEL;
  25992. const void* pNext = {};
  25993. VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated;
  25994. };
  25995. static_assert( sizeof( PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ), "struct and wrapper have different size!" );
  25996. static_assert( std::is_standard_layout<PerformanceConfigurationAcquireInfoINTEL>::value, "struct wrapper is not a standard layout!" );
  25997. template <>
  25998. struct CppType<StructureType, StructureType::ePerformanceConfigurationAcquireInfoINTEL>
  25999. {
  26000. using Type = PerformanceConfigurationAcquireInfoINTEL;
  26001. };
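// Illustrative usage sketch (not part of the generated header): acquiring an INTEL performance
// configuration (VK_INTEL_performance_query). Assumes a valid vk::Device `device` and the
// enhanced-mode acquirePerformanceConfigurationINTEL wrapper declared elsewhere in this header.
//
//   vk::PerformanceConfigurationAcquireInfoINTEL acquireInfo(
//     vk::PerformanceConfigurationTypeINTEL::eCommandQueueMetricsDiscoveryActivated );
//   vk::PerformanceConfigurationINTEL configuration = device.acquirePerformanceConfigurationINTEL( acquireInfo );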
  26002. class PerformanceConfigurationINTEL
  26003. {
  26004. public:
  26005. using CType = VkPerformanceConfigurationINTEL;
  26006. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL;
  26007. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
  26008. public:
  26009. VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL() VULKAN_HPP_NOEXCEPT
  26010. : m_performanceConfigurationINTEL(VK_NULL_HANDLE)
  26011. {}
  26012. VULKAN_HPP_CONSTEXPR PerformanceConfigurationINTEL( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  26013. : m_performanceConfigurationINTEL(VK_NULL_HANDLE)
  26014. {}
  26015. VULKAN_HPP_TYPESAFE_EXPLICIT PerformanceConfigurationINTEL( VkPerformanceConfigurationINTEL performanceConfigurationINTEL ) VULKAN_HPP_NOEXCEPT
  26016. : m_performanceConfigurationINTEL( performanceConfigurationINTEL )
  26017. {}
  26018. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  26019. PerformanceConfigurationINTEL & operator=(VkPerformanceConfigurationINTEL performanceConfigurationINTEL) VULKAN_HPP_NOEXCEPT
  26020. {
  26021. m_performanceConfigurationINTEL = performanceConfigurationINTEL;
  26022. return *this;
  26023. }
  26024. #endif
  26025. PerformanceConfigurationINTEL & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  26026. {
  26027. m_performanceConfigurationINTEL = VK_NULL_HANDLE;
  26028. return *this;
  26029. }
  26030. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  26031. auto operator<=>( PerformanceConfigurationINTEL const& ) const = default;
  26032. #else
  26033. bool operator==( PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
  26034. {
  26035. return m_performanceConfigurationINTEL == rhs.m_performanceConfigurationINTEL;
  26036. }
  26037. bool operator!=(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
  26038. {
  26039. return m_performanceConfigurationINTEL != rhs.m_performanceConfigurationINTEL;
  26040. }
  26041. bool operator<(PerformanceConfigurationINTEL const & rhs ) const VULKAN_HPP_NOEXCEPT
  26042. {
  26043. return m_performanceConfigurationINTEL < rhs.m_performanceConfigurationINTEL;
  26044. }
  26045. #endif
  26046. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPerformanceConfigurationINTEL() const VULKAN_HPP_NOEXCEPT
  26047. {
  26048. return m_performanceConfigurationINTEL;
  26049. }
  26050. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  26051. {
  26052. return m_performanceConfigurationINTEL != VK_NULL_HANDLE;
  26053. }
  26054. bool operator!() const VULKAN_HPP_NOEXCEPT
  26055. {
  26056. return m_performanceConfigurationINTEL == VK_NULL_HANDLE;
  26057. }
  26058. private:
  26059. VkPerformanceConfigurationINTEL m_performanceConfigurationINTEL;
  26060. };
  26061. static_assert( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL ) == sizeof( VkPerformanceConfigurationINTEL ), "handle and wrapper have different size!" );
  26062. template <>
  26063. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePerformanceConfigurationINTEL>
  26064. {
  26065. using type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL;
  26066. };
  26067. template <>
  26068. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePerformanceConfigurationINTEL>
  26069. {
  26070. using Type = VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL;
  26071. };
  26072. template <>
  26073. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>
  26074. {
  26075. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  26076. };
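// Illustrative usage sketch (not part of the generated header): binding the configuration
// acquired above to a queue before submitting work, and releasing it afterwards. The
// setPerformanceConfigurationINTEL / releasePerformanceConfigurationINTEL wrappers are
// assumed from elsewhere in this header; `queue` is a valid vk::Queue.
//
//   queue.setPerformanceConfigurationINTEL( configuration );
//   device.releasePerformanceConfigurationINTEL( configuration );   // when profiling is done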
  26077. class QueryPool
  26078. {
  26079. public:
  26080. using CType = VkQueryPool;
  26081. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool;
  26082. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool;
  26083. public:
  26084. VULKAN_HPP_CONSTEXPR QueryPool() VULKAN_HPP_NOEXCEPT
  26085. : m_queryPool(VK_NULL_HANDLE)
  26086. {}
  26087. VULKAN_HPP_CONSTEXPR QueryPool( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  26088. : m_queryPool(VK_NULL_HANDLE)
  26089. {}
  26090. VULKAN_HPP_TYPESAFE_EXPLICIT QueryPool( VkQueryPool queryPool ) VULKAN_HPP_NOEXCEPT
  26091. : m_queryPool( queryPool )
  26092. {}
  26093. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  26094. QueryPool & operator=(VkQueryPool queryPool) VULKAN_HPP_NOEXCEPT
  26095. {
  26096. m_queryPool = queryPool;
  26097. return *this;
  26098. }
  26099. #endif
  26100. QueryPool & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  26101. {
  26102. m_queryPool = VK_NULL_HANDLE;
  26103. return *this;
  26104. }
  26105. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  26106. auto operator<=>( QueryPool const& ) const = default;
  26107. #else
  26108. bool operator==( QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
  26109. {
  26110. return m_queryPool == rhs.m_queryPool;
  26111. }
  26112. bool operator!=(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
  26113. {
  26114. return m_queryPool != rhs.m_queryPool;
  26115. }
  26116. bool operator<(QueryPool const & rhs ) const VULKAN_HPP_NOEXCEPT
  26117. {
  26118. return m_queryPool < rhs.m_queryPool;
  26119. }
  26120. #endif
  26121. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueryPool() const VULKAN_HPP_NOEXCEPT
  26122. {
  26123. return m_queryPool;
  26124. }
  26125. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  26126. {
  26127. return m_queryPool != VK_NULL_HANDLE;
  26128. }
  26129. bool operator!() const VULKAN_HPP_NOEXCEPT
  26130. {
  26131. return m_queryPool == VK_NULL_HANDLE;
  26132. }
  26133. private:
  26134. VkQueryPool m_queryPool;
  26135. };
  26136. static_assert( sizeof( VULKAN_HPP_NAMESPACE::QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" );
  26137. template <>
  26138. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eQueryPool>
  26139. {
  26140. using type = VULKAN_HPP_NAMESPACE::QueryPool;
  26141. };
  26142. template <>
  26143. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueryPool>
  26144. {
  26145. using Type = VULKAN_HPP_NAMESPACE::QueryPool;
  26146. };
  26147. template <>
  26148. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueryPool>
  26149. {
  26150. using Type = VULKAN_HPP_NAMESPACE::QueryPool;
  26151. };
  26152. template <>
  26153. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::QueryPool>
  26154. {
  26155. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  26156. };
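// Illustrative usage sketch (not part of the generated header): creating an occlusion query
// pool with 16 queries. Assumes a valid vk::Device `device` plus the QueryPoolCreateInfo
// wrapper and enhanced-mode createQueryPool declared elsewhere in this header.
//
//   vk::QueryPoolCreateInfo queryPoolInfo( {}, vk::QueryType::eOcclusion, 16 );
//   vk::QueryPool queryPool = device.createQueryPool( queryPoolInfo );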
  26157. struct RenderPassBeginInfo
  26158. {
  26159. static const bool allowDuplicate = false;
  26160. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassBeginInfo;
  26161. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26162. VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo(VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ = {}, VULKAN_HPP_NAMESPACE::Rect2D renderArea_ = {}, uint32_t clearValueCount_ = {}, const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ = {}) VULKAN_HPP_NOEXCEPT
  26163. : renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( clearValueCount_ ), pClearValues( pClearValues_ )
  26164. {}
  26165. VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26166. RenderPassBeginInfo( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  26167. : RenderPassBeginInfo( *reinterpret_cast<RenderPassBeginInfo const *>( &rhs ) )
  26168. {}
  26169. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  26170. RenderPassBeginInfo( VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_, VULKAN_HPP_NAMESPACE::Rect2D renderArea_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ )
  26171. : renderPass( renderPass_ ), framebuffer( framebuffer_ ), renderArea( renderArea_ ), clearValueCount( static_cast<uint32_t>( clearValues_.size() ) ), pClearValues( clearValues_.data() )
  26172. {}
  26173. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  26174. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26175. VULKAN_HPP_CONSTEXPR_14 RenderPassBeginInfo & operator=( RenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26176. RenderPassBeginInfo & operator=( VkRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  26177. {
  26178. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo const *>( &rhs );
  26179. return *this;
  26180. }
  26181. RenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  26182. {
  26183. pNext = pNext_;
  26184. return *this;
  26185. }
  26186. RenderPassBeginInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
  26187. {
  26188. renderPass = renderPass_;
  26189. return *this;
  26190. }
  26191. RenderPassBeginInfo & setFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer_ ) VULKAN_HPP_NOEXCEPT
  26192. {
  26193. framebuffer = framebuffer_;
  26194. return *this;
  26195. }
  26196. RenderPassBeginInfo & setRenderArea( VULKAN_HPP_NAMESPACE::Rect2D const & renderArea_ ) VULKAN_HPP_NOEXCEPT
  26197. {
  26198. renderArea = renderArea_;
  26199. return *this;
  26200. }
  26201. RenderPassBeginInfo & setClearValueCount( uint32_t clearValueCount_ ) VULKAN_HPP_NOEXCEPT
  26202. {
  26203. clearValueCount = clearValueCount_;
  26204. return *this;
  26205. }
  26206. RenderPassBeginInfo & setPClearValues( const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues_ ) VULKAN_HPP_NOEXCEPT
  26207. {
  26208. pClearValues = pClearValues_;
  26209. return *this;
  26210. }
  26211. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  26212. RenderPassBeginInfo & setClearValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ClearValue> const & clearValues_ ) VULKAN_HPP_NOEXCEPT
  26213. {
  26214. clearValueCount = static_cast<uint32_t>( clearValues_.size() );
  26215. pClearValues = clearValues_.data();
  26216. return *this;
  26217. }
  26218. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  26219. operator VkRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
  26220. {
  26221. return *reinterpret_cast<const VkRenderPassBeginInfo*>( this );
  26222. }
  26223. operator VkRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
  26224. {
  26225. return *reinterpret_cast<VkRenderPassBeginInfo*>( this );
  26226. }
  26227. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  26228. auto operator<=>( RenderPassBeginInfo const& ) const = default;
  26229. #else
  26230. bool operator==( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  26231. {
  26232. return ( sType == rhs.sType )
  26233. && ( pNext == rhs.pNext )
  26234. && ( renderPass == rhs.renderPass )
  26235. && ( framebuffer == rhs.framebuffer )
  26236. && ( renderArea == rhs.renderArea )
  26237. && ( clearValueCount == rhs.clearValueCount )
  26238. && ( pClearValues == rhs.pClearValues );
  26239. }
  26240. bool operator!=( RenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  26241. {
  26242. return !operator==( rhs );
  26243. }
  26244. #endif
  26245. public:
  26246. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassBeginInfo;
  26247. const void* pNext = {};
  26248. VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
  26249. VULKAN_HPP_NAMESPACE::Framebuffer framebuffer = {};
  26250. VULKAN_HPP_NAMESPACE::Rect2D renderArea = {};
  26251. uint32_t clearValueCount = {};
  26252. const VULKAN_HPP_NAMESPACE::ClearValue* pClearValues = {};
  26253. };
  26254. static_assert( sizeof( RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
  26255. static_assert( std::is_standard_layout<RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
  26256. template <>
  26257. struct CppType<StructureType, StructureType::eRenderPassBeginInfo>
  26258. {
  26259. using Type = RenderPassBeginInfo;
  26260. };
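// Illustrative usage sketch (not part of the generated header): beginning a render pass with a
// single color clear value, using the ArrayProxyNoTemporaries constructor shown above. Assumes
// valid `renderPass` and `framebuffer` handles, a vk::Extent2D `extent`, and a recording
// vk::CommandBuffer `commandBuffer`.
//
//   vk::ClearValue clearColor( vk::ClearColorValue( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } ) );
//   vk::RenderPassBeginInfo renderPassBeginInfo( renderPass, framebuffer, vk::Rect2D( { 0, 0 }, extent ), clearColor );
//   commandBuffer.beginRenderPass( renderPassBeginInfo, vk::SubpassContents::eInline );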
  26261. struct SubpassBeginInfo
  26262. {
  26263. static const bool allowDuplicate = false;
  26264. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassBeginInfo;
  26265. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26266. VULKAN_HPP_CONSTEXPR SubpassBeginInfo(VULKAN_HPP_NAMESPACE::SubpassContents contents_ = VULKAN_HPP_NAMESPACE::SubpassContents::eInline) VULKAN_HPP_NOEXCEPT
  26267. : contents( contents_ )
  26268. {}
  26269. VULKAN_HPP_CONSTEXPR SubpassBeginInfo( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26270. SubpassBeginInfo( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  26271. : SubpassBeginInfo( *reinterpret_cast<SubpassBeginInfo const *>( &rhs ) )
  26272. {}
  26273. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26274. VULKAN_HPP_CONSTEXPR_14 SubpassBeginInfo & operator=( SubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26275. SubpassBeginInfo & operator=( VkSubpassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  26276. {
  26277. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassBeginInfo const *>( &rhs );
  26278. return *this;
  26279. }
  26280. SubpassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  26281. {
  26282. pNext = pNext_;
  26283. return *this;
  26284. }
  26285. SubpassBeginInfo & setContents( VULKAN_HPP_NAMESPACE::SubpassContents contents_ ) VULKAN_HPP_NOEXCEPT
  26286. {
  26287. contents = contents_;
  26288. return *this;
  26289. }
  26290. operator VkSubpassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
  26291. {
  26292. return *reinterpret_cast<const VkSubpassBeginInfo*>( this );
  26293. }
  26294. operator VkSubpassBeginInfo &() VULKAN_HPP_NOEXCEPT
  26295. {
  26296. return *reinterpret_cast<VkSubpassBeginInfo*>( this );
  26297. }
  26298. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  26299. auto operator<=>( SubpassBeginInfo const& ) const = default;
  26300. #else
  26301. bool operator==( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  26302. {
  26303. return ( sType == rhs.sType )
  26304. && ( pNext == rhs.pNext )
  26305. && ( contents == rhs.contents );
  26306. }
  26307. bool operator!=( SubpassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  26308. {
  26309. return !operator==( rhs );
  26310. }
  26311. #endif
  26312. public:
  26313. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassBeginInfo;
  26314. const void* pNext = {};
  26315. VULKAN_HPP_NAMESPACE::SubpassContents contents = VULKAN_HPP_NAMESPACE::SubpassContents::eInline;
  26316. };
  26317. static_assert( sizeof( SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), "struct and wrapper have different size!" );
  26318. static_assert( std::is_standard_layout<SubpassBeginInfo>::value, "struct wrapper is not a standard layout!" );
  26319. template <>
  26320. struct CppType<StructureType, StructureType::eSubpassBeginInfo>
  26321. {
  26322. using Type = SubpassBeginInfo;
  26323. };
  26324. using SubpassBeginInfoKHR = SubpassBeginInfo;
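// Illustrative usage sketch (not part of the generated header): the Vulkan 1.2 form of the call
// above, passing the subpass contents through a SubpassBeginInfo instead of a plain enum value.
// Reuses `renderPassBeginInfo` and `commandBuffer` from the previous sketch.
//
//   vk::SubpassBeginInfo subpassBeginInfo( vk::SubpassContents::eInline );
//   commandBuffer.beginRenderPass2( renderPassBeginInfo, subpassBeginInfo );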
  26325. struct ImageBlit
  26326. {
  26327. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26328. VULKAN_HPP_CONSTEXPR_14 ImageBlit(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& srcOffsets_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const& dstOffsets_ = {}) VULKAN_HPP_NOEXCEPT
  26329. : srcSubresource( srcSubresource_ ), srcOffsets( srcOffsets_ ), dstSubresource( dstSubresource_ ), dstOffsets( dstOffsets_ )
  26330. {}
  26331. VULKAN_HPP_CONSTEXPR_14 ImageBlit( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26332. ImageBlit( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
  26333. : ImageBlit( *reinterpret_cast<ImageBlit const *>( &rhs ) )
  26334. {}
  26335. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26336. VULKAN_HPP_CONSTEXPR_14 ImageBlit & operator=( ImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26337. ImageBlit & operator=( VkImageBlit const & rhs ) VULKAN_HPP_NOEXCEPT
  26338. {
  26339. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageBlit const *>( &rhs );
  26340. return *this;
  26341. }
  26342. ImageBlit & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
  26343. {
  26344. srcSubresource = srcSubresource_;
  26345. return *this;
  26346. }
  26347. ImageBlit & setSrcOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & srcOffsets_ ) VULKAN_HPP_NOEXCEPT
  26348. {
  26349. srcOffsets = srcOffsets_;
  26350. return *this;
  26351. }
  26352. ImageBlit & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
  26353. {
  26354. dstSubresource = dstSubresource_;
  26355. return *this;
  26356. }
  26357. ImageBlit & setDstOffsets( std::array<VULKAN_HPP_NAMESPACE::Offset3D,2> const & dstOffsets_ ) VULKAN_HPP_NOEXCEPT
  26358. {
  26359. dstOffsets = dstOffsets_;
  26360. return *this;
  26361. }
  26362. operator VkImageBlit const&() const VULKAN_HPP_NOEXCEPT
  26363. {
  26364. return *reinterpret_cast<const VkImageBlit*>( this );
  26365. }
  26366. operator VkImageBlit &() VULKAN_HPP_NOEXCEPT
  26367. {
  26368. return *reinterpret_cast<VkImageBlit*>( this );
  26369. }
  26370. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  26371. auto operator<=>( ImageBlit const& ) const = default;
  26372. #else
  26373. bool operator==( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
  26374. {
  26375. return ( srcSubresource == rhs.srcSubresource )
  26376. && ( srcOffsets == rhs.srcOffsets )
  26377. && ( dstSubresource == rhs.dstSubresource )
  26378. && ( dstOffsets == rhs.dstOffsets );
  26379. }
  26380. bool operator!=( ImageBlit const& rhs ) const VULKAN_HPP_NOEXCEPT
  26381. {
  26382. return !operator==( rhs );
  26383. }
  26384. #endif
  26385. public:
  26386. VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
  26387. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> srcOffsets = {};
  26388. VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
  26389. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::Offset3D, 2> dstOffsets = {};
  26390. };
  26391. static_assert( sizeof( ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
  26392. static_assert( std::is_standard_layout<ImageBlit>::value, "struct wrapper is not a standard layout!" );
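// Illustrative usage sketch (not part of the generated header): blitting mip level 0 of a 2D
// color image to mip level 1 at half size, as done when generating a mip chain. Assumes a valid
// vk::Image `image` whose two levels are in eTransferSrcOptimal / eTransferDstOptimal layout,
// int32_t `width` and `height` holding the level-0 dimensions, and a recording `commandBuffer`.
//
//   vk::ImageBlit blitRegion(
//     vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ),
//     { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( width, height, 1 ) },
//     vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 1, 0, 1 ),
//     { vk::Offset3D( 0, 0, 0 ), vk::Offset3D( width / 2, height / 2, 1 ) } );
//   commandBuffer.blitImage( image, vk::ImageLayout::eTransferSrcOptimal,
//                            image, vk::ImageLayout::eTransferDstOptimal,
//                            blitRegion, vk::Filter::eLinear );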
  26393. struct ImageSubresourceRange
  26394. {
  26395. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26396. VULKAN_HPP_CONSTEXPR ImageSubresourceRange(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, uint32_t baseMipLevel_ = {}, uint32_t levelCount_ = {}, uint32_t baseArrayLayer_ = {}, uint32_t layerCount_ = {}) VULKAN_HPP_NOEXCEPT
  26397. : aspectMask( aspectMask_ ), baseMipLevel( baseMipLevel_ ), levelCount( levelCount_ ), baseArrayLayer( baseArrayLayer_ ), layerCount( layerCount_ )
  26398. {}
  26399. VULKAN_HPP_CONSTEXPR ImageSubresourceRange( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26400. ImageSubresourceRange( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
  26401. : ImageSubresourceRange( *reinterpret_cast<ImageSubresourceRange const *>( &rhs ) )
  26402. {}
  26403. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26404. VULKAN_HPP_CONSTEXPR_14 ImageSubresourceRange & operator=( ImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26405. ImageSubresourceRange & operator=( VkImageSubresourceRange const & rhs ) VULKAN_HPP_NOEXCEPT
  26406. {
  26407. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSubresourceRange const *>( &rhs );
  26408. return *this;
  26409. }
  26410. ImageSubresourceRange & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
  26411. {
  26412. aspectMask = aspectMask_;
  26413. return *this;
  26414. }
  26415. ImageSubresourceRange & setBaseMipLevel( uint32_t baseMipLevel_ ) VULKAN_HPP_NOEXCEPT
  26416. {
  26417. baseMipLevel = baseMipLevel_;
  26418. return *this;
  26419. }
  26420. ImageSubresourceRange & setLevelCount( uint32_t levelCount_ ) VULKAN_HPP_NOEXCEPT
  26421. {
  26422. levelCount = levelCount_;
  26423. return *this;
  26424. }
  26425. ImageSubresourceRange & setBaseArrayLayer( uint32_t baseArrayLayer_ ) VULKAN_HPP_NOEXCEPT
  26426. {
  26427. baseArrayLayer = baseArrayLayer_;
  26428. return *this;
  26429. }
  26430. ImageSubresourceRange & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
  26431. {
  26432. layerCount = layerCount_;
  26433. return *this;
  26434. }
  26435. operator VkImageSubresourceRange const&() const VULKAN_HPP_NOEXCEPT
  26436. {
  26437. return *reinterpret_cast<const VkImageSubresourceRange*>( this );
  26438. }
  26439. operator VkImageSubresourceRange &() VULKAN_HPP_NOEXCEPT
  26440. {
  26441. return *reinterpret_cast<VkImageSubresourceRange*>( this );
  26442. }
  26443. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  26444. auto operator<=>( ImageSubresourceRange const& ) const = default;
  26445. #else
  26446. bool operator==( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
  26447. {
  26448. return ( aspectMask == rhs.aspectMask )
  26449. && ( baseMipLevel == rhs.baseMipLevel )
  26450. && ( levelCount == rhs.levelCount )
  26451. && ( baseArrayLayer == rhs.baseArrayLayer )
  26452. && ( layerCount == rhs.layerCount );
  26453. }
  26454. bool operator!=( ImageSubresourceRange const& rhs ) const VULKAN_HPP_NOEXCEPT
  26455. {
  26456. return !operator==( rhs );
  26457. }
  26458. #endif
  26459. public:
  26460. VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
  26461. uint32_t baseMipLevel = {};
  26462. uint32_t levelCount = {};
  26463. uint32_t baseArrayLayer = {};
  26464. uint32_t layerCount = {};
  26465. };
  26466. static_assert( sizeof( ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ), "struct and wrapper have different size!" );
  26467. static_assert( std::is_standard_layout<ImageSubresourceRange>::value, "struct wrapper is not a standard layout!" );
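  // Illustrative usage sketch (not part of the generated bindings): an ImageSubresourceRange that
  // selects every mip level and array layer of a color image, as typically plugged into
  // ImageMemoryBarrier::subresourceRange or ImageViewCreateInfo::subresourceRange. The
  // VK_REMAINING_* constants come from the underlying C header.
  //
  //   vk::ImageSubresourceRange fullRange( vk::ImageAspectFlagBits::eColor,  // aspectMask
  //                                        0, VK_REMAINING_MIP_LEVELS,       // baseMipLevel, levelCount
  //                                        0, VK_REMAINING_ARRAY_LAYERS );   // baseArrayLayer, layerCount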
  26468. struct ImageCopy
  26469. {
  26470. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26471. VULKAN_HPP_CONSTEXPR ImageCopy(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
  26472. : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
  26473. {}
  26474. VULKAN_HPP_CONSTEXPR ImageCopy( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26475. ImageCopy( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
  26476. : ImageCopy( *reinterpret_cast<ImageCopy const *>( &rhs ) )
  26477. {}
  26478. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26479. VULKAN_HPP_CONSTEXPR_14 ImageCopy & operator=( ImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26480. ImageCopy & operator=( VkImageCopy const & rhs ) VULKAN_HPP_NOEXCEPT
  26481. {
  26482. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCopy const *>( &rhs );
  26483. return *this;
  26484. }
  26485. ImageCopy & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
  26486. {
  26487. srcSubresource = srcSubresource_;
  26488. return *this;
  26489. }
  26490. ImageCopy & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
  26491. {
  26492. srcOffset = srcOffset_;
  26493. return *this;
  26494. }
  26495. ImageCopy & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
  26496. {
  26497. dstSubresource = dstSubresource_;
  26498. return *this;
  26499. }
  26500. ImageCopy & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
  26501. {
  26502. dstOffset = dstOffset_;
  26503. return *this;
  26504. }
  26505. ImageCopy & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
  26506. {
  26507. extent = extent_;
  26508. return *this;
  26509. }
  26510. operator VkImageCopy const&() const VULKAN_HPP_NOEXCEPT
  26511. {
  26512. return *reinterpret_cast<const VkImageCopy*>( this );
  26513. }
  26514. operator VkImageCopy &() VULKAN_HPP_NOEXCEPT
  26515. {
  26516. return *reinterpret_cast<VkImageCopy*>( this );
  26517. }
  26518. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  26519. auto operator<=>( ImageCopy const& ) const = default;
  26520. #else
  26521. bool operator==( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
  26522. {
  26523. return ( srcSubresource == rhs.srcSubresource )
  26524. && ( srcOffset == rhs.srcOffset )
  26525. && ( dstSubresource == rhs.dstSubresource )
  26526. && ( dstOffset == rhs.dstOffset )
  26527. && ( extent == rhs.extent );
  26528. }
  26529. bool operator!=( ImageCopy const& rhs ) const VULKAN_HPP_NOEXCEPT
  26530. {
  26531. return !operator==( rhs );
  26532. }
  26533. #endif
  26534. public:
  26535. VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
  26536. VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
  26537. VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
  26538. VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
  26539. VULKAN_HPP_NAMESPACE::Extent3D extent = {};
  26540. };
  26541. static_assert( sizeof( ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
  26542. static_assert( std::is_standard_layout<ImageCopy>::value, "struct wrapper is not a standard layout!" );
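  // Illustrative usage sketch (not part of the generated bindings): copying the full extent of
  // mip 0 between two images with CommandBuffer::copyImage. `cmd`, `srcImage`, `dstImage`,
  // `width` and `height` are assumed inputs.
  //
  //   vk::ImageCopy region( vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ),  // srcSubresource
  //                         vk::Offset3D( 0, 0, 0 ),                                                 // srcOffset
  //                         vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ),  // dstSubresource
  //                         vk::Offset3D( 0, 0, 0 ),                                                 // dstOffset
  //                         vk::Extent3D( width, height, 1 ) );                                      // extent
  //   cmd.copyImage( srcImage, vk::ImageLayout::eTransferSrcOptimal,
  //                  dstImage, vk::ImageLayout::eTransferDstOptimal, region );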
  26543. struct SubpassEndInfo
  26544. {
  26545. static const bool allowDuplicate = false;
  26546. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassEndInfo;
  26547. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26548. VULKAN_HPP_CONSTEXPR SubpassEndInfo() VULKAN_HPP_NOEXCEPT
  26549. {}
  26550. VULKAN_HPP_CONSTEXPR SubpassEndInfo( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26551. SubpassEndInfo( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  26552. : SubpassEndInfo( *reinterpret_cast<SubpassEndInfo const *>( &rhs ) )
  26553. {}
  26554. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26555. VULKAN_HPP_CONSTEXPR_14 SubpassEndInfo & operator=( SubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26556. SubpassEndInfo & operator=( VkSubpassEndInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  26557. {
  26558. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassEndInfo const *>( &rhs );
  26559. return *this;
  26560. }
  26561. SubpassEndInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  26562. {
  26563. pNext = pNext_;
  26564. return *this;
  26565. }
  26566. operator VkSubpassEndInfo const&() const VULKAN_HPP_NOEXCEPT
  26567. {
  26568. return *reinterpret_cast<const VkSubpassEndInfo*>( this );
  26569. }
  26570. operator VkSubpassEndInfo &() VULKAN_HPP_NOEXCEPT
  26571. {
  26572. return *reinterpret_cast<VkSubpassEndInfo*>( this );
  26573. }
  26574. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  26575. auto operator<=>( SubpassEndInfo const& ) const = default;
  26576. #else
  26577. bool operator==( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  26578. {
  26579. return ( sType == rhs.sType )
  26580. && ( pNext == rhs.pNext );
  26581. }
  26582. bool operator!=( SubpassEndInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  26583. {
  26584. return !operator==( rhs );
  26585. }
  26586. #endif
  26587. public:
  26588. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassEndInfo;
  26589. const void* pNext = {};
  26590. };
  26591. static_assert( sizeof( SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" );
  26592. static_assert( std::is_standard_layout<SubpassEndInfo>::value, "struct wrapper is not a standard layout!" );
  26593. template <>
  26594. struct CppType<StructureType, StructureType::eSubpassEndInfo>
  26595. {
  26596. using Type = SubpassEndInfo;
  26597. };
  26598. using SubpassEndInfoKHR = SubpassEndInfo;
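  // Illustrative usage sketch (not part of the generated bindings): SubpassEndInfo carries no data
  // beyond its sType/pNext pair; it exists so that vkCmdEndRenderPass2 (core in Vulkan 1.2, with
  // the KHR alias from VK_KHR_create_renderpass2) can be extended through pNext. Assuming a
  // command buffer `cmd` inside a render pass that was begun with beginRenderPass2:
  //
  //   cmd.endRenderPass2( vk::SubpassEndInfo() );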
  26599. class IndirectCommandsLayoutNV
  26600. {
  26601. public:
  26602. using CType = VkIndirectCommandsLayoutNV;
  26603. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV;
  26604. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
  26605. public:
  26606. VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV() VULKAN_HPP_NOEXCEPT
  26607. : m_indirectCommandsLayoutNV(VK_NULL_HANDLE)
  26608. {}
  26609. VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutNV( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  26610. : m_indirectCommandsLayoutNV(VK_NULL_HANDLE)
  26611. {}
  26612. VULKAN_HPP_TYPESAFE_EXPLICIT IndirectCommandsLayoutNV( VkIndirectCommandsLayoutNV indirectCommandsLayoutNV ) VULKAN_HPP_NOEXCEPT
  26613. : m_indirectCommandsLayoutNV( indirectCommandsLayoutNV )
  26614. {}
  26615. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  26616. IndirectCommandsLayoutNV & operator=(VkIndirectCommandsLayoutNV indirectCommandsLayoutNV) VULKAN_HPP_NOEXCEPT
  26617. {
  26618. m_indirectCommandsLayoutNV = indirectCommandsLayoutNV;
  26619. return *this;
  26620. }
  26621. #endif
  26622. IndirectCommandsLayoutNV & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  26623. {
  26624. m_indirectCommandsLayoutNV = VK_NULL_HANDLE;
  26625. return *this;
  26626. }
  26627. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  26628. auto operator<=>( IndirectCommandsLayoutNV const& ) const = default;
  26629. #else
  26630. bool operator==( IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
  26631. {
  26632. return m_indirectCommandsLayoutNV == rhs.m_indirectCommandsLayoutNV;
  26633. }
  26634. bool operator!=(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
  26635. {
  26636. return m_indirectCommandsLayoutNV != rhs.m_indirectCommandsLayoutNV;
  26637. }
  26638. bool operator<(IndirectCommandsLayoutNV const & rhs ) const VULKAN_HPP_NOEXCEPT
  26639. {
  26640. return m_indirectCommandsLayoutNV < rhs.m_indirectCommandsLayoutNV;
  26641. }
  26642. #endif
  26643. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkIndirectCommandsLayoutNV() const VULKAN_HPP_NOEXCEPT
  26644. {
  26645. return m_indirectCommandsLayoutNV;
  26646. }
  26647. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  26648. {
  26649. return m_indirectCommandsLayoutNV != VK_NULL_HANDLE;
  26650. }
  26651. bool operator!() const VULKAN_HPP_NOEXCEPT
  26652. {
  26653. return m_indirectCommandsLayoutNV == VK_NULL_HANDLE;
  26654. }
  26655. private:
  26656. VkIndirectCommandsLayoutNV m_indirectCommandsLayoutNV;
  26657. };
  26658. static_assert( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ), "handle and wrapper have different size!" );
  26659. template <>
  26660. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eIndirectCommandsLayoutNV>
  26661. {
  26662. using type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
  26663. };
  26664. template <>
  26665. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eIndirectCommandsLayoutNV>
  26666. {
  26667. using Type = VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
  26668. };
  26669. template <>
  26670. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>
  26671. {
  26672. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  26673. };
  26674. struct IndirectCommandsStreamNV
  26675. {
  26676. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26677. VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV(VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}) VULKAN_HPP_NOEXCEPT
  26678. : buffer( buffer_ ), offset( offset_ )
  26679. {}
  26680. VULKAN_HPP_CONSTEXPR IndirectCommandsStreamNV( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26681. IndirectCommandsStreamNV( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
  26682. : IndirectCommandsStreamNV( *reinterpret_cast<IndirectCommandsStreamNV const *>( &rhs ) )
  26683. {}
  26684. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26685. VULKAN_HPP_CONSTEXPR_14 IndirectCommandsStreamNV & operator=( IndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26686. IndirectCommandsStreamNV & operator=( VkIndirectCommandsStreamNV const & rhs ) VULKAN_HPP_NOEXCEPT
  26687. {
  26688. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV const *>( &rhs );
  26689. return *this;
  26690. }
  26691. IndirectCommandsStreamNV & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
  26692. {
  26693. buffer = buffer_;
  26694. return *this;
  26695. }
  26696. IndirectCommandsStreamNV & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
  26697. {
  26698. offset = offset_;
  26699. return *this;
  26700. }
  26701. operator VkIndirectCommandsStreamNV const&() const VULKAN_HPP_NOEXCEPT
  26702. {
  26703. return *reinterpret_cast<const VkIndirectCommandsStreamNV*>( this );
  26704. }
  26705. operator VkIndirectCommandsStreamNV &() VULKAN_HPP_NOEXCEPT
  26706. {
  26707. return *reinterpret_cast<VkIndirectCommandsStreamNV*>( this );
  26708. }
  26709. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  26710. auto operator<=>( IndirectCommandsStreamNV const& ) const = default;
  26711. #else
  26712. bool operator==( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  26713. {
  26714. return ( buffer == rhs.buffer )
  26715. && ( offset == rhs.offset );
  26716. }
  26717. bool operator!=( IndirectCommandsStreamNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  26718. {
  26719. return !operator==( rhs );
  26720. }
  26721. #endif
  26722. public:
  26723. VULKAN_HPP_NAMESPACE::Buffer buffer = {};
  26724. VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
  26725. };
  26726. static_assert( sizeof( IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ), "struct and wrapper have different size!" );
  26727. static_assert( std::is_standard_layout<IndirectCommandsStreamNV>::value, "struct wrapper is not a standard layout!" );
  26728. struct GeneratedCommandsInfoNV
  26729. {
  26730. static const bool allowDuplicate = false;
  26731. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsInfoNV;
  26732. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26733. VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t streamCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ = {}, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {}) VULKAN_HPP_NOEXCEPT
  26734. : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( streamCount_ ), pStreams( pStreams_ ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ )
  26735. {}
  26736. VULKAN_HPP_CONSTEXPR GeneratedCommandsInfoNV( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26737. GeneratedCommandsInfoNV( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  26738. : GeneratedCommandsInfoNV( *reinterpret_cast<GeneratedCommandsInfoNV const *>( &rhs ) )
  26739. {}
  26740. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  26741. GeneratedCommandsInfoNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::Pipeline pipeline_, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_, uint32_t sequencesCount_ = {}, VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ = {}, VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ = {} )
  26742. : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), streamCount( static_cast<uint32_t>( streams_.size() ) ), pStreams( streams_.data() ), sequencesCount( sequencesCount_ ), preprocessBuffer( preprocessBuffer_ ), preprocessOffset( preprocessOffset_ ), preprocessSize( preprocessSize_ ), sequencesCountBuffer( sequencesCountBuffer_ ), sequencesCountOffset( sequencesCountOffset_ ), sequencesIndexBuffer( sequencesIndexBuffer_ ), sequencesIndexOffset( sequencesIndexOffset_ )
  26743. {}
  26744. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  26745. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26746. VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsInfoNV & operator=( GeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26747. GeneratedCommandsInfoNV & operator=( VkGeneratedCommandsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  26748. {
  26749. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV const *>( &rhs );
  26750. return *this;
  26751. }
  26752. GeneratedCommandsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  26753. {
  26754. pNext = pNext_;
  26755. return *this;
  26756. }
  26757. GeneratedCommandsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
  26758. {
  26759. pipelineBindPoint = pipelineBindPoint_;
  26760. return *this;
  26761. }
  26762. GeneratedCommandsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
  26763. {
  26764. pipeline = pipeline_;
  26765. return *this;
  26766. }
  26767. GeneratedCommandsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
  26768. {
  26769. indirectCommandsLayout = indirectCommandsLayout_;
  26770. return *this;
  26771. }
  26772. GeneratedCommandsInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
  26773. {
  26774. streamCount = streamCount_;
  26775. return *this;
  26776. }
  26777. GeneratedCommandsInfoNV & setPStreams( const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams_ ) VULKAN_HPP_NOEXCEPT
  26778. {
  26779. pStreams = pStreams_;
  26780. return *this;
  26781. }
  26782. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  26783. GeneratedCommandsInfoNV & setStreams( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV> const & streams_ ) VULKAN_HPP_NOEXCEPT
  26784. {
  26785. streamCount = static_cast<uint32_t>( streams_.size() );
  26786. pStreams = streams_.data();
  26787. return *this;
  26788. }
  26789. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  26790. GeneratedCommandsInfoNV & setSequencesCount( uint32_t sequencesCount_ ) VULKAN_HPP_NOEXCEPT
  26791. {
  26792. sequencesCount = sequencesCount_;
  26793. return *this;
  26794. }
  26795. GeneratedCommandsInfoNV & setPreprocessBuffer( VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer_ ) VULKAN_HPP_NOEXCEPT
  26796. {
  26797. preprocessBuffer = preprocessBuffer_;
  26798. return *this;
  26799. }
  26800. GeneratedCommandsInfoNV & setPreprocessOffset( VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset_ ) VULKAN_HPP_NOEXCEPT
  26801. {
  26802. preprocessOffset = preprocessOffset_;
  26803. return *this;
  26804. }
  26805. GeneratedCommandsInfoNV & setPreprocessSize( VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize_ ) VULKAN_HPP_NOEXCEPT
  26806. {
  26807. preprocessSize = preprocessSize_;
  26808. return *this;
  26809. }
  26810. GeneratedCommandsInfoNV & setSequencesCountBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer_ ) VULKAN_HPP_NOEXCEPT
  26811. {
  26812. sequencesCountBuffer = sequencesCountBuffer_;
  26813. return *this;
  26814. }
  26815. GeneratedCommandsInfoNV & setSequencesCountOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset_ ) VULKAN_HPP_NOEXCEPT
  26816. {
  26817. sequencesCountOffset = sequencesCountOffset_;
  26818. return *this;
  26819. }
  26820. GeneratedCommandsInfoNV & setSequencesIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer_ ) VULKAN_HPP_NOEXCEPT
  26821. {
  26822. sequencesIndexBuffer = sequencesIndexBuffer_;
  26823. return *this;
  26824. }
  26825. GeneratedCommandsInfoNV & setSequencesIndexOffset( VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset_ ) VULKAN_HPP_NOEXCEPT
  26826. {
  26827. sequencesIndexOffset = sequencesIndexOffset_;
  26828. return *this;
  26829. }
  26830. operator VkGeneratedCommandsInfoNV const&() const VULKAN_HPP_NOEXCEPT
  26831. {
  26832. return *reinterpret_cast<const VkGeneratedCommandsInfoNV*>( this );
  26833. }
  26834. operator VkGeneratedCommandsInfoNV &() VULKAN_HPP_NOEXCEPT
  26835. {
  26836. return *reinterpret_cast<VkGeneratedCommandsInfoNV*>( this );
  26837. }
  26838. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  26839. auto operator<=>( GeneratedCommandsInfoNV const& ) const = default;
  26840. #else
  26841. bool operator==( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  26842. {
  26843. return ( sType == rhs.sType )
  26844. && ( pNext == rhs.pNext )
  26845. && ( pipelineBindPoint == rhs.pipelineBindPoint )
  26846. && ( pipeline == rhs.pipeline )
  26847. && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
  26848. && ( streamCount == rhs.streamCount )
  26849. && ( pStreams == rhs.pStreams )
  26850. && ( sequencesCount == rhs.sequencesCount )
  26851. && ( preprocessBuffer == rhs.preprocessBuffer )
  26852. && ( preprocessOffset == rhs.preprocessOffset )
  26853. && ( preprocessSize == rhs.preprocessSize )
  26854. && ( sequencesCountBuffer == rhs.sequencesCountBuffer )
  26855. && ( sequencesCountOffset == rhs.sequencesCountOffset )
  26856. && ( sequencesIndexBuffer == rhs.sequencesIndexBuffer )
  26857. && ( sequencesIndexOffset == rhs.sequencesIndexOffset );
  26858. }
  26859. bool operator!=( GeneratedCommandsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  26860. {
  26861. return !operator==( rhs );
  26862. }
  26863. #endif
  26864. public:
  26865. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsInfoNV;
  26866. const void* pNext = {};
  26867. VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
  26868. VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
  26869. VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
  26870. uint32_t streamCount = {};
  26871. const VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV* pStreams = {};
  26872. uint32_t sequencesCount = {};
  26873. VULKAN_HPP_NAMESPACE::Buffer preprocessBuffer = {};
  26874. VULKAN_HPP_NAMESPACE::DeviceSize preprocessOffset = {};
  26875. VULKAN_HPP_NAMESPACE::DeviceSize preprocessSize = {};
  26876. VULKAN_HPP_NAMESPACE::Buffer sequencesCountBuffer = {};
  26877. VULKAN_HPP_NAMESPACE::DeviceSize sequencesCountOffset = {};
  26878. VULKAN_HPP_NAMESPACE::Buffer sequencesIndexBuffer = {};
  26879. VULKAN_HPP_NAMESPACE::DeviceSize sequencesIndexOffset = {};
  26880. };
  26881. static_assert( sizeof( GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ), "struct and wrapper have different size!" );
  26882. static_assert( std::is_standard_layout<GeneratedCommandsInfoNV>::value, "struct wrapper is not a standard layout!" );
  26883. template <>
  26884. struct CppType<StructureType, StructureType::eGeneratedCommandsInfoNV>
  26885. {
  26886. using Type = GeneratedCommandsInfoNV;
  26887. };
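  // Illustrative usage sketch (not part of the generated bindings, VK_NV_device_generated_commands):
  // a GeneratedCommandsInfoNV ties a pipeline, an IndirectCommandsLayoutNV and one
  // IndirectCommandsStreamNV per token stream together before
  // CommandBuffer::executeGeneratedCommandsNV. All handles, buffers and counts below
  // (`tokenBuffer`, `pipeline`, `commandsLayout`, `sequenceCount`, `preprocessBuffer`,
  // `preprocessSize`, `cmd`) are assumed to have been created elsewhere.
  //
  //   vk::IndirectCommandsStreamNV stream( tokenBuffer, 0 );
  //   vk::GeneratedCommandsInfoNV info = vk::GeneratedCommandsInfoNV()
  //                                        .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
  //                                        .setPipeline( pipeline )
  //                                        .setIndirectCommandsLayout( commandsLayout )
  //                                        .setStreams( stream )
  //                                        .setSequencesCount( sequenceCount )
  //                                        .setPreprocessBuffer( preprocessBuffer )
  //                                        .setPreprocessSize( preprocessSize );
  //   cmd.executeGeneratedCommandsNV( VK_FALSE, info );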
  26888. struct MemoryBarrier
  26889. {
  26890. static const bool allowDuplicate = false;
  26891. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryBarrier;
  26892. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26893. VULKAN_HPP_CONSTEXPR MemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}) VULKAN_HPP_NOEXCEPT
  26894. : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ )
  26895. {}
  26896. VULKAN_HPP_CONSTEXPR MemoryBarrier( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26897. MemoryBarrier( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
  26898. : MemoryBarrier( *reinterpret_cast<MemoryBarrier const *>( &rhs ) )
  26899. {}
  26900. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26901. VULKAN_HPP_CONSTEXPR_14 MemoryBarrier & operator=( MemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26902. MemoryBarrier & operator=( VkMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
  26903. {
  26904. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryBarrier const *>( &rhs );
  26905. return *this;
  26906. }
  26907. MemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  26908. {
  26909. pNext = pNext_;
  26910. return *this;
  26911. }
  26912. MemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
  26913. {
  26914. srcAccessMask = srcAccessMask_;
  26915. return *this;
  26916. }
  26917. MemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
  26918. {
  26919. dstAccessMask = dstAccessMask_;
  26920. return *this;
  26921. }
  26922. operator VkMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
  26923. {
  26924. return *reinterpret_cast<const VkMemoryBarrier*>( this );
  26925. }
  26926. operator VkMemoryBarrier &() VULKAN_HPP_NOEXCEPT
  26927. {
  26928. return *reinterpret_cast<VkMemoryBarrier*>( this );
  26929. }
  26930. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  26931. auto operator<=>( MemoryBarrier const& ) const = default;
  26932. #else
  26933. bool operator==( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
  26934. {
  26935. return ( sType == rhs.sType )
  26936. && ( pNext == rhs.pNext )
  26937. && ( srcAccessMask == rhs.srcAccessMask )
  26938. && ( dstAccessMask == rhs.dstAccessMask );
  26939. }
  26940. bool operator!=( MemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
  26941. {
  26942. return !operator==( rhs );
  26943. }
  26944. #endif
  26945. public:
  26946. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryBarrier;
  26947. const void* pNext = {};
  26948. VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
  26949. VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
  26950. };
  26951. static_assert( sizeof( MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
  26952. static_assert( std::is_standard_layout<MemoryBarrier>::value, "struct wrapper is not a standard layout!" );
  26953. template <>
  26954. struct CppType<StructureType, StructureType::eMemoryBarrier>
  26955. {
  26956. using Type = MemoryBarrier;
  26957. };
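  // Illustrative usage sketch (not part of the generated bindings): a global MemoryBarrier that
  // makes prior compute-shader writes visible to subsequent transfer reads, recorded through
  // CommandBuffer::pipelineBarrier. `cmd` is an assumed command buffer in the recording state.
  //
  //   vk::MemoryBarrier barrier( vk::AccessFlagBits::eShaderWrite,      // srcAccessMask
  //                              vk::AccessFlagBits::eTransferRead );   // dstAccessMask
  //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eComputeShader,   // srcStageMask
  //                        vk::PipelineStageFlagBits::eTransfer,        // dstStageMask
  //                        {},                                          // dependencyFlags
  //                        barrier, nullptr, nullptr );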
  26958. struct ImageMemoryBarrier
  26959. {
  26960. static const bool allowDuplicate = false;
  26961. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryBarrier;
  26962. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26963. VULKAN_HPP_CONSTEXPR ImageMemoryBarrier(VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t srcQueueFamilyIndex_ = {}, uint32_t dstQueueFamilyIndex_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}) VULKAN_HPP_NOEXCEPT
  26964. : srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), oldLayout( oldLayout_ ), newLayout( newLayout_ ), srcQueueFamilyIndex( srcQueueFamilyIndex_ ), dstQueueFamilyIndex( dstQueueFamilyIndex_ ), image( image_ ), subresourceRange( subresourceRange_ )
  26965. {}
  26966. VULKAN_HPP_CONSTEXPR ImageMemoryBarrier( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26967. ImageMemoryBarrier( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
  26968. : ImageMemoryBarrier( *reinterpret_cast<ImageMemoryBarrier const *>( &rhs ) )
  26969. {}
  26970. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  26971. VULKAN_HPP_CONSTEXPR_14 ImageMemoryBarrier & operator=( ImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  26972. ImageMemoryBarrier & operator=( VkImageMemoryBarrier const & rhs ) VULKAN_HPP_NOEXCEPT
  26973. {
  26974. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier const *>( &rhs );
  26975. return *this;
  26976. }
  26977. ImageMemoryBarrier & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  26978. {
  26979. pNext = pNext_;
  26980. return *this;
  26981. }
  26982. ImageMemoryBarrier & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
  26983. {
  26984. srcAccessMask = srcAccessMask_;
  26985. return *this;
  26986. }
  26987. ImageMemoryBarrier & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
  26988. {
  26989. dstAccessMask = dstAccessMask_;
  26990. return *this;
  26991. }
  26992. ImageMemoryBarrier & setOldLayout( VULKAN_HPP_NAMESPACE::ImageLayout oldLayout_ ) VULKAN_HPP_NOEXCEPT
  26993. {
  26994. oldLayout = oldLayout_;
  26995. return *this;
  26996. }
  26997. ImageMemoryBarrier & setNewLayout( VULKAN_HPP_NAMESPACE::ImageLayout newLayout_ ) VULKAN_HPP_NOEXCEPT
  26998. {
  26999. newLayout = newLayout_;
  27000. return *this;
  27001. }
  27002. ImageMemoryBarrier & setSrcQueueFamilyIndex( uint32_t srcQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
  27003. {
  27004. srcQueueFamilyIndex = srcQueueFamilyIndex_;
  27005. return *this;
  27006. }
  27007. ImageMemoryBarrier & setDstQueueFamilyIndex( uint32_t dstQueueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
  27008. {
  27009. dstQueueFamilyIndex = dstQueueFamilyIndex_;
  27010. return *this;
  27011. }
  27012. ImageMemoryBarrier & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
  27013. {
  27014. image = image_;
  27015. return *this;
  27016. }
  27017. ImageMemoryBarrier & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
  27018. {
  27019. subresourceRange = subresourceRange_;
  27020. return *this;
  27021. }
  27022. operator VkImageMemoryBarrier const&() const VULKAN_HPP_NOEXCEPT
  27023. {
  27024. return *reinterpret_cast<const VkImageMemoryBarrier*>( this );
  27025. }
  27026. operator VkImageMemoryBarrier &() VULKAN_HPP_NOEXCEPT
  27027. {
  27028. return *reinterpret_cast<VkImageMemoryBarrier*>( this );
  27029. }
  27030. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  27031. auto operator<=>( ImageMemoryBarrier const& ) const = default;
  27032. #else
  27033. bool operator==( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
  27034. {
  27035. return ( sType == rhs.sType )
  27036. && ( pNext == rhs.pNext )
  27037. && ( srcAccessMask == rhs.srcAccessMask )
  27038. && ( dstAccessMask == rhs.dstAccessMask )
  27039. && ( oldLayout == rhs.oldLayout )
  27040. && ( newLayout == rhs.newLayout )
  27041. && ( srcQueueFamilyIndex == rhs.srcQueueFamilyIndex )
  27042. && ( dstQueueFamilyIndex == rhs.dstQueueFamilyIndex )
  27043. && ( image == rhs.image )
  27044. && ( subresourceRange == rhs.subresourceRange );
  27045. }
  27046. bool operator!=( ImageMemoryBarrier const& rhs ) const VULKAN_HPP_NOEXCEPT
  27047. {
  27048. return !operator==( rhs );
  27049. }
  27050. #endif
  27051. public:
  27052. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryBarrier;
  27053. const void* pNext = {};
  27054. VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
  27055. VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
  27056. VULKAN_HPP_NAMESPACE::ImageLayout oldLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  27057. VULKAN_HPP_NAMESPACE::ImageLayout newLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  27058. uint32_t srcQueueFamilyIndex = {};
  27059. uint32_t dstQueueFamilyIndex = {};
  27060. VULKAN_HPP_NAMESPACE::Image image = {};
  27061. VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
  27062. };
  27063. static_assert( sizeof( ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
  27064. static_assert( std::is_standard_layout<ImageMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
  27065. template <>
  27066. struct CppType<StructureType, StructureType::eImageMemoryBarrier>
  27067. {
  27068. using Type = ImageMemoryBarrier;
  27069. };
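  // Illustrative usage sketch (not part of the generated bindings): transitioning a freshly
  // created color image from Undefined to TransferDstOptimal with an ImageMemoryBarrier. `cmd`
  // and `image` are assumed inputs; no queue-family ownership transfer is performed, so both
  // family indices stay VK_QUEUE_FAMILY_IGNORED.
  //
  //   vk::ImageMemoryBarrier barrier = vk::ImageMemoryBarrier()
  //     .setSrcAccessMask( {} )
  //     .setDstAccessMask( vk::AccessFlagBits::eTransferWrite )
  //     .setOldLayout( vk::ImageLayout::eUndefined )
  //     .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
  //     .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //     .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
  //     .setImage( image )
  //     .setSubresourceRange( vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
  //   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer,
  //                        {}, nullptr, nullptr, barrier );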
  27070. class BufferView
  27071. {
  27072. public:
  27073. using CType = VkBufferView;
  27074. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eBufferView;
  27075. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView;
  27076. public:
  27077. VULKAN_HPP_CONSTEXPR BufferView() VULKAN_HPP_NOEXCEPT
  27078. : m_bufferView(VK_NULL_HANDLE)
  27079. {}
  27080. VULKAN_HPP_CONSTEXPR BufferView( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  27081. : m_bufferView(VK_NULL_HANDLE)
  27082. {}
  27083. VULKAN_HPP_TYPESAFE_EXPLICIT BufferView( VkBufferView bufferView ) VULKAN_HPP_NOEXCEPT
  27084. : m_bufferView( bufferView )
  27085. {}
  27086. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  27087. BufferView & operator=(VkBufferView bufferView) VULKAN_HPP_NOEXCEPT
  27088. {
  27089. m_bufferView = bufferView;
  27090. return *this;
  27091. }
  27092. #endif
  27093. BufferView & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  27094. {
  27095. m_bufferView = VK_NULL_HANDLE;
  27096. return *this;
  27097. }
  27098. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  27099. auto operator<=>( BufferView const& ) const = default;
  27100. #else
  27101. bool operator==( BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
  27102. {
  27103. return m_bufferView == rhs.m_bufferView;
  27104. }
  27105. bool operator!=(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
  27106. {
  27107. return m_bufferView != rhs.m_bufferView;
  27108. }
  27109. bool operator<(BufferView const & rhs ) const VULKAN_HPP_NOEXCEPT
  27110. {
  27111. return m_bufferView < rhs.m_bufferView;
  27112. }
  27113. #endif
  27114. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkBufferView() const VULKAN_HPP_NOEXCEPT
  27115. {
  27116. return m_bufferView;
  27117. }
  27118. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  27119. {
  27120. return m_bufferView != VK_NULL_HANDLE;
  27121. }
  27122. bool operator!() const VULKAN_HPP_NOEXCEPT
  27123. {
  27124. return m_bufferView == VK_NULL_HANDLE;
  27125. }
  27126. private:
  27127. VkBufferView m_bufferView;
  27128. };
  27129. static_assert( sizeof( VULKAN_HPP_NAMESPACE::BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );
  27130. template <>
  27131. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eBufferView>
  27132. {
  27133. using type = VULKAN_HPP_NAMESPACE::BufferView;
  27134. };
  27135. template <>
  27136. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eBufferView>
  27137. {
  27138. using Type = VULKAN_HPP_NAMESPACE::BufferView;
  27139. };
  27140. template <>
  27141. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eBufferView>
  27142. {
  27143. using Type = VULKAN_HPP_NAMESPACE::BufferView;
  27144. };
  27145. template <>
  27146. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::BufferView>
  27147. {
  27148. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  27149. };
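  // Illustrative usage sketch (not part of the generated bindings): a BufferView is created from
  // a buffer that was allocated with a texel-buffer usage flag and is later consumed through
  // WriteDescriptorSet::pTexelBufferView. `device`, `buffer` and `format` are assumed to exist.
  //
  //   vk::BufferView texelView =
  //     device.createBufferView( vk::BufferViewCreateInfo( {}, buffer, format, 0, VK_WHOLE_SIZE ) );
  //   // ... use texelView in a uniform / storage texel buffer descriptor ...
  //   device.destroyBufferView( texelView );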
  27150. struct WriteDescriptorSet
  27151. {
  27152. static const bool allowDuplicate = false;
  27153. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSet;
  27154. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  27155. VULKAN_HPP_CONSTEXPR WriteDescriptorSet(VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ = {}, uint32_t dstBinding_ = {}, uint32_t dstArrayElement_ = {}, uint32_t descriptorCount_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ = {}, const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ = {}) VULKAN_HPP_NOEXCEPT
  27156. : dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( descriptorCount_ ), descriptorType( descriptorType_ ), pImageInfo( pImageInfo_ ), pBufferInfo( pBufferInfo_ ), pTexelBufferView( pTexelBufferView_ )
  27157. {}
  27158. VULKAN_HPP_CONSTEXPR WriteDescriptorSet( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  27159. WriteDescriptorSet( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
  27160. : WriteDescriptorSet( *reinterpret_cast<WriteDescriptorSet const *>( &rhs ) )
  27161. {}
  27162. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  27163. WriteDescriptorSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_, uint32_t dstBinding_, uint32_t dstArrayElement_, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ = {} )
  27164. : dstSet( dstSet_ ), dstBinding( dstBinding_ ), dstArrayElement( dstArrayElement_ ), descriptorCount( static_cast<uint32_t>( !imageInfo_.empty() ? imageInfo_.size() : !bufferInfo_.empty() ? bufferInfo_.size() : texelBufferView_.size() ) ), descriptorType( descriptorType_ ), pImageInfo( imageInfo_.data() ), pBufferInfo( bufferInfo_.data() ), pTexelBufferView( texelBufferView_.data() )
  27165. {
  27166. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  27167. VULKAN_HPP_ASSERT( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) == 1 );
  27168. #else
  27169. if ( ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) != 1 )
  27170. {
  27171. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::WriteDescriptorSet::WriteDescriptorSet: ( !imageInfo_.empty() + !bufferInfo_.empty() + !texelBufferView_.empty() ) != 1" );
  27172. }
  27173. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  27174. }
  27175. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  27176. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  27177. VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSet & operator=( WriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  27178. WriteDescriptorSet & operator=( VkWriteDescriptorSet const & rhs ) VULKAN_HPP_NOEXCEPT
  27179. {
  27180. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSet const *>( &rhs );
  27181. return *this;
  27182. }
  27183. WriteDescriptorSet & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  27184. {
  27185. pNext = pNext_;
  27186. return *this;
  27187. }
  27188. WriteDescriptorSet & setDstSet( VULKAN_HPP_NAMESPACE::DescriptorSet dstSet_ ) VULKAN_HPP_NOEXCEPT
  27189. {
  27190. dstSet = dstSet_;
  27191. return *this;
  27192. }
  27193. WriteDescriptorSet & setDstBinding( uint32_t dstBinding_ ) VULKAN_HPP_NOEXCEPT
  27194. {
  27195. dstBinding = dstBinding_;
  27196. return *this;
  27197. }
  27198. WriteDescriptorSet & setDstArrayElement( uint32_t dstArrayElement_ ) VULKAN_HPP_NOEXCEPT
  27199. {
  27200. dstArrayElement = dstArrayElement_;
  27201. return *this;
  27202. }
  27203. WriteDescriptorSet & setDescriptorCount( uint32_t descriptorCount_ ) VULKAN_HPP_NOEXCEPT
  27204. {
  27205. descriptorCount = descriptorCount_;
  27206. return *this;
  27207. }
  27208. WriteDescriptorSet & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
  27209. {
  27210. descriptorType = descriptorType_;
  27211. return *this;
  27212. }
  27213. WriteDescriptorSet & setPImageInfo( const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo_ ) VULKAN_HPP_NOEXCEPT
  27214. {
  27215. pImageInfo = pImageInfo_;
  27216. return *this;
  27217. }
  27218. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  27219. WriteDescriptorSet & setImageInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorImageInfo> const & imageInfo_ ) VULKAN_HPP_NOEXCEPT
  27220. {
  27221. descriptorCount = static_cast<uint32_t>( imageInfo_.size() );
  27222. pImageInfo = imageInfo_.data();
  27223. return *this;
  27224. }
  27225. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  27226. WriteDescriptorSet & setPBufferInfo( const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo_ ) VULKAN_HPP_NOEXCEPT
  27227. {
  27228. pBufferInfo = pBufferInfo_;
  27229. return *this;
  27230. }
  27231. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  27232. WriteDescriptorSet & setBufferInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo> const & bufferInfo_ ) VULKAN_HPP_NOEXCEPT
  27233. {
  27234. descriptorCount = static_cast<uint32_t>( bufferInfo_.size() );
  27235. pBufferInfo = bufferInfo_.data();
  27236. return *this;
  27237. }
  27238. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  27239. WriteDescriptorSet & setPTexelBufferView( const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView_ ) VULKAN_HPP_NOEXCEPT
  27240. {
  27241. pTexelBufferView = pTexelBufferView_;
  27242. return *this;
  27243. }
  27244. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  27245. WriteDescriptorSet & setTexelBufferView( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::BufferView> const & texelBufferView_ ) VULKAN_HPP_NOEXCEPT
  27246. {
  27247. descriptorCount = static_cast<uint32_t>( texelBufferView_.size() );
  27248. pTexelBufferView = texelBufferView_.data();
  27249. return *this;
  27250. }
  27251. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  27252. operator VkWriteDescriptorSet const&() const VULKAN_HPP_NOEXCEPT
  27253. {
  27254. return *reinterpret_cast<const VkWriteDescriptorSet*>( this );
  27255. }
  27256. operator VkWriteDescriptorSet &() VULKAN_HPP_NOEXCEPT
  27257. {
  27258. return *reinterpret_cast<VkWriteDescriptorSet*>( this );
  27259. }
  27260. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  27261. auto operator<=>( WriteDescriptorSet const& ) const = default;
  27262. #else
  27263. bool operator==( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
  27264. {
  27265. return ( sType == rhs.sType )
  27266. && ( pNext == rhs.pNext )
  27267. && ( dstSet == rhs.dstSet )
  27268. && ( dstBinding == rhs.dstBinding )
  27269. && ( dstArrayElement == rhs.dstArrayElement )
  27270. && ( descriptorCount == rhs.descriptorCount )
  27271. && ( descriptorType == rhs.descriptorType )
  27272. && ( pImageInfo == rhs.pImageInfo )
  27273. && ( pBufferInfo == rhs.pBufferInfo )
  27274. && ( pTexelBufferView == rhs.pTexelBufferView );
  27275. }
  27276. bool operator!=( WriteDescriptorSet const& rhs ) const VULKAN_HPP_NOEXCEPT
  27277. {
  27278. return !operator==( rhs );
  27279. }
  27280. #endif
  27281. public:
  27282. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSet;
  27283. const void* pNext = {};
  27284. VULKAN_HPP_NAMESPACE::DescriptorSet dstSet = {};
  27285. uint32_t dstBinding = {};
  27286. uint32_t dstArrayElement = {};
  27287. uint32_t descriptorCount = {};
  27288. VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
  27289. const VULKAN_HPP_NAMESPACE::DescriptorImageInfo* pImageInfo = {};
  27290. const VULKAN_HPP_NAMESPACE::DescriptorBufferInfo* pBufferInfo = {};
  27291. const VULKAN_HPP_NAMESPACE::BufferView* pTexelBufferView = {};
  27292. };
  27293. static_assert( sizeof( WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
  27294. static_assert( std::is_standard_layout<WriteDescriptorSet>::value, "struct wrapper is not a standard layout!" );
  27295. template <>
  27296. struct CppType<StructureType, StructureType::eWriteDescriptorSet>
  27297. {
  27298. using Type = WriteDescriptorSet;
  27299. };
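  // Illustrative usage sketch (not part of the generated bindings): updating a single
  // uniform-buffer binding with Device::updateDescriptorSets. Exactly one of the image, buffer
  // and texel-buffer-view arrays may be used per write, which is what the enhanced-mode
  // constructor above checks. `device`, `descriptorSet` and `uniformBuffer` are assumed inputs.
  //
  //   vk::DescriptorBufferInfo bufferInfo( uniformBuffer, 0, VK_WHOLE_SIZE );
  //   vk::WriteDescriptorSet write = vk::WriteDescriptorSet()
  //                                    .setDstSet( descriptorSet )
  //                                    .setDstBinding( 0 )
  //                                    .setDstArrayElement( 0 )
  //                                    .setDescriptorType( vk::DescriptorType::eUniformBuffer )
  //                                    .setBufferInfo( bufferInfo );   // also sets descriptorCount to 1
  //   device.updateDescriptorSets( write, nullptr );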
  27300. class DescriptorUpdateTemplate
  27301. {
  27302. public:
  27303. using CType = VkDescriptorUpdateTemplate;
  27304. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate;
  27305. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate;
  27306. public:
  27307. VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate() VULKAN_HPP_NOEXCEPT
  27308. : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
  27309. {}
  27310. VULKAN_HPP_CONSTEXPR DescriptorUpdateTemplate( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  27311. : m_descriptorUpdateTemplate(VK_NULL_HANDLE)
  27312. {}
  27313. VULKAN_HPP_TYPESAFE_EXPLICIT DescriptorUpdateTemplate( VkDescriptorUpdateTemplate descriptorUpdateTemplate ) VULKAN_HPP_NOEXCEPT
  27314. : m_descriptorUpdateTemplate( descriptorUpdateTemplate )
  27315. {}
  27316. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  27317. DescriptorUpdateTemplate & operator=(VkDescriptorUpdateTemplate descriptorUpdateTemplate) VULKAN_HPP_NOEXCEPT
  27318. {
  27319. m_descriptorUpdateTemplate = descriptorUpdateTemplate;
  27320. return *this;
  27321. }
  27322. #endif
  27323. DescriptorUpdateTemplate & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  27324. {
  27325. m_descriptorUpdateTemplate = VK_NULL_HANDLE;
  27326. return *this;
  27327. }
  27328. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  27329. auto operator<=>( DescriptorUpdateTemplate const& ) const = default;
  27330. #else
  27331. bool operator==( DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
  27332. {
  27333. return m_descriptorUpdateTemplate == rhs.m_descriptorUpdateTemplate;
  27334. }
  27335. bool operator!=(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
  27336. {
  27337. return m_descriptorUpdateTemplate != rhs.m_descriptorUpdateTemplate;
  27338. }
  27339. bool operator<(DescriptorUpdateTemplate const & rhs ) const VULKAN_HPP_NOEXCEPT
  27340. {
  27341. return m_descriptorUpdateTemplate < rhs.m_descriptorUpdateTemplate;
  27342. }
  27343. #endif
  27344. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDescriptorUpdateTemplate() const VULKAN_HPP_NOEXCEPT
  27345. {
  27346. return m_descriptorUpdateTemplate;
  27347. }
  27348. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  27349. {
  27350. return m_descriptorUpdateTemplate != VK_NULL_HANDLE;
  27351. }
  27352. bool operator!() const VULKAN_HPP_NOEXCEPT
  27353. {
  27354. return m_descriptorUpdateTemplate == VK_NULL_HANDLE;
  27355. }
  27356. private:
  27357. VkDescriptorUpdateTemplate m_descriptorUpdateTemplate;
  27358. };
  27359. static_assert( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ), "handle and wrapper have different size!" );
  27360. template <>
  27361. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDescriptorUpdateTemplate>
  27362. {
  27363. using type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
  27364. };
  27365. template <>
  27366. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDescriptorUpdateTemplate>
  27367. {
  27368. using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
  27369. };
  27370. template <>
  27371. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDescriptorUpdateTemplate>
  27372. {
  27373. using Type = VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
  27374. };
  27375. template <>
  27376. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>
  27377. {
  27378. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  27379. };
  27380. using DescriptorUpdateTemplateKHR = DescriptorUpdateTemplate;
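  // Illustrative usage sketch (not part of the generated bindings): a DescriptorUpdateTemplate
  // remembers a fixed layout of descriptor data so that a descriptor set can be refreshed from a
  // plain host struct with a single call instead of a list of WriteDescriptorSets. The
  // create-info setup is omitted here; `device`, `descriptorSet`, `updateTemplate` and `hostData`
  // are assumed to exist.
  //
  //   device.updateDescriptorSetWithTemplate( descriptorSet, updateTemplate, &hostData );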
  27381. class Event
  27382. {
  27383. public:
  27384. using CType = VkEvent;
  27385. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eEvent;
  27386. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent;
  27387. public:
  27388. VULKAN_HPP_CONSTEXPR Event() VULKAN_HPP_NOEXCEPT
  27389. : m_event(VK_NULL_HANDLE)
  27390. {}
  27391. VULKAN_HPP_CONSTEXPR Event( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  27392. : m_event(VK_NULL_HANDLE)
  27393. {}
  27394. VULKAN_HPP_TYPESAFE_EXPLICIT Event( VkEvent event ) VULKAN_HPP_NOEXCEPT
  27395. : m_event( event )
  27396. {}
  27397. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  27398. Event & operator=(VkEvent event) VULKAN_HPP_NOEXCEPT
  27399. {
  27400. m_event = event;
  27401. return *this;
  27402. }
  27403. #endif
  27404. Event & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  27405. {
  27406. m_event = VK_NULL_HANDLE;
  27407. return *this;
  27408. }
  27409. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  27410. auto operator<=>( Event const& ) const = default;
  27411. #else
  27412. bool operator==( Event const & rhs ) const VULKAN_HPP_NOEXCEPT
  27413. {
  27414. return m_event == rhs.m_event;
  27415. }
  27416. bool operator!=(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
  27417. {
  27418. return m_event != rhs.m_event;
  27419. }
  27420. bool operator<(Event const & rhs ) const VULKAN_HPP_NOEXCEPT
  27421. {
  27422. return m_event < rhs.m_event;
  27423. }
  27424. #endif
  27425. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkEvent() const VULKAN_HPP_NOEXCEPT
  27426. {
  27427. return m_event;
  27428. }
  27429. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  27430. {
  27431. return m_event != VK_NULL_HANDLE;
  27432. }
  27433. bool operator!() const VULKAN_HPP_NOEXCEPT
  27434. {
  27435. return m_event == VK_NULL_HANDLE;
  27436. }
  27437. private:
  27438. VkEvent m_event;
  27439. };
  27440. static_assert( sizeof( VULKAN_HPP_NAMESPACE::Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" );
  27441. template <>
  27442. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eEvent>
  27443. {
  27444. using type = VULKAN_HPP_NAMESPACE::Event;
  27445. };
  27446. template <>
  27447. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eEvent>
  27448. {
  27449. using Type = VULKAN_HPP_NAMESPACE::Event;
  27450. };
  27451. template <>
  27452. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eEvent>
  27453. {
  27454. using Type = VULKAN_HPP_NAMESPACE::Event;
  27455. };
  27456. template <>
  27457. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Event>
  27458. {
  27459. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  27460. };
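  // Illustrative usage sketch (not part of the generated bindings): an Event is a fine-grained
  // synchronization primitive; a typical pattern signals it at one point in a command stream and
  // waits on it later. `device` and `cmd` are assumed inputs, and the event must outlive the
  // submitted work.
  //
  //   vk::Event event = device.createEvent( vk::EventCreateInfo() );
  //   cmd.setEvent( event, vk::PipelineStageFlagBits::eTransfer );
  //   // ... later, possibly in another command buffer ...
  //   cmd.waitEvents( event,
  //                   vk::PipelineStageFlagBits::eTransfer,         // srcStageMask
  //                   vk::PipelineStageFlagBits::eFragmentShader,   // dstStageMask
  //                   nullptr, nullptr, nullptr );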
  27461. struct ImageResolve
  27462. {
  27463. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  27464. VULKAN_HPP_CONSTEXPR ImageResolve(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
  27465. : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
  27466. {}
  27467. VULKAN_HPP_CONSTEXPR ImageResolve( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  27468. ImageResolve( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
  27469. : ImageResolve( *reinterpret_cast<ImageResolve const *>( &rhs ) )
  27470. {}
  27471. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  27472. VULKAN_HPP_CONSTEXPR_14 ImageResolve & operator=( ImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  27473. ImageResolve & operator=( VkImageResolve const & rhs ) VULKAN_HPP_NOEXCEPT
  27474. {
  27475. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve const *>( &rhs );
  27476. return *this;
  27477. }
  27478. ImageResolve & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
  27479. {
  27480. srcSubresource = srcSubresource_;
  27481. return *this;
  27482. }
  27483. ImageResolve & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
  27484. {
  27485. srcOffset = srcOffset_;
  27486. return *this;
  27487. }
  27488. ImageResolve & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
  27489. {
  27490. dstSubresource = dstSubresource_;
  27491. return *this;
  27492. }
  27493. ImageResolve & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
  27494. {
  27495. dstOffset = dstOffset_;
  27496. return *this;
  27497. }
  27498. ImageResolve & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
  27499. {
  27500. extent = extent_;
  27501. return *this;
  27502. }
  27503. operator VkImageResolve const&() const VULKAN_HPP_NOEXCEPT
  27504. {
  27505. return *reinterpret_cast<const VkImageResolve*>( this );
  27506. }
  27507. operator VkImageResolve &() VULKAN_HPP_NOEXCEPT
  27508. {
  27509. return *reinterpret_cast<VkImageResolve*>( this );
  27510. }
  27511. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  27512. auto operator<=>( ImageResolve const& ) const = default;
  27513. #else
  27514. bool operator==( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
  27515. {
  27516. return ( srcSubresource == rhs.srcSubresource )
  27517. && ( srcOffset == rhs.srcOffset )
  27518. && ( dstSubresource == rhs.dstSubresource )
  27519. && ( dstOffset == rhs.dstOffset )
  27520. && ( extent == rhs.extent );
  27521. }
  27522. bool operator!=( ImageResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
  27523. {
  27524. return !operator==( rhs );
  27525. }
  27526. #endif
  27527. public:
  27528. VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
  27529. VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
  27530. VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
  27531. VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
  27532. VULKAN_HPP_NAMESPACE::Extent3D extent = {};
  27533. };
  27534. static_assert( sizeof( ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
  27535. static_assert( std::is_standard_layout<ImageResolve>::value, "struct wrapper is not a standard layout!" );
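// Editor's note - illustrative usage sketch, not generated code. ImageResolve describes one
// region of a multisample resolve; its fluent setters chain as shown. Assumes struct
// constructors are enabled (default configuration); "extent" is a placeholder vk::Extent3D.
//
//   vk::ImageResolve region = vk::ImageResolve()
//     .setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
//     .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
//     .setExtent( extent );   // srcOffset / dstOffset keep their zero defaults
//   // the region is later handed to CommandBuffer::resolveImage (declared further below)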
  27536. struct ImageResolve2KHR
  27537. {
  27538. static const bool allowDuplicate = false;
  27539. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageResolve2KHR;
  27540. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  27541. VULKAN_HPP_CONSTEXPR ImageResolve2KHR(VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D srcOffset_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource_ = {}, VULKAN_HPP_NAMESPACE::Offset3D dstOffset_ = {}, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}) VULKAN_HPP_NOEXCEPT
  27542. : srcSubresource( srcSubresource_ ), srcOffset( srcOffset_ ), dstSubresource( dstSubresource_ ), dstOffset( dstOffset_ ), extent( extent_ )
  27543. {}
  27544. VULKAN_HPP_CONSTEXPR ImageResolve2KHR( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  27545. ImageResolve2KHR( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  27546. : ImageResolve2KHR( *reinterpret_cast<ImageResolve2KHR const *>( &rhs ) )
  27547. {}
  27548. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  27549. VULKAN_HPP_CONSTEXPR_14 ImageResolve2KHR & operator=( ImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  27550. ImageResolve2KHR & operator=( VkImageResolve2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  27551. {
  27552. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageResolve2KHR const *>( &rhs );
  27553. return *this;
  27554. }
  27555. ImageResolve2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  27556. {
  27557. pNext = pNext_;
  27558. return *this;
  27559. }
  27560. ImageResolve2KHR & setSrcSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & srcSubresource_ ) VULKAN_HPP_NOEXCEPT
  27561. {
  27562. srcSubresource = srcSubresource_;
  27563. return *this;
  27564. }
  27565. ImageResolve2KHR & setSrcOffset( VULKAN_HPP_NAMESPACE::Offset3D const & srcOffset_ ) VULKAN_HPP_NOEXCEPT
  27566. {
  27567. srcOffset = srcOffset_;
  27568. return *this;
  27569. }
  27570. ImageResolve2KHR & setDstSubresource( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers const & dstSubresource_ ) VULKAN_HPP_NOEXCEPT
  27571. {
  27572. dstSubresource = dstSubresource_;
  27573. return *this;
  27574. }
  27575. ImageResolve2KHR & setDstOffset( VULKAN_HPP_NAMESPACE::Offset3D const & dstOffset_ ) VULKAN_HPP_NOEXCEPT
  27576. {
  27577. dstOffset = dstOffset_;
  27578. return *this;
  27579. }
  27580. ImageResolve2KHR & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
  27581. {
  27582. extent = extent_;
  27583. return *this;
  27584. }
  27585. operator VkImageResolve2KHR const&() const VULKAN_HPP_NOEXCEPT
  27586. {
  27587. return *reinterpret_cast<const VkImageResolve2KHR*>( this );
  27588. }
  27589. operator VkImageResolve2KHR &() VULKAN_HPP_NOEXCEPT
  27590. {
  27591. return *reinterpret_cast<VkImageResolve2KHR*>( this );
  27592. }
  27593. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  27594. auto operator<=>( ImageResolve2KHR const& ) const = default;
  27595. #else
  27596. bool operator==( ImageResolve2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  27597. {
  27598. return ( sType == rhs.sType )
  27599. && ( pNext == rhs.pNext )
  27600. && ( srcSubresource == rhs.srcSubresource )
  27601. && ( srcOffset == rhs.srcOffset )
  27602. && ( dstSubresource == rhs.dstSubresource )
  27603. && ( dstOffset == rhs.dstOffset )
  27604. && ( extent == rhs.extent );
  27605. }
  27606. bool operator!=( ImageResolve2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  27607. {
  27608. return !operator==( rhs );
  27609. }
  27610. #endif
  27611. public:
  27612. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageResolve2KHR;
  27613. const void* pNext = {};
  27614. VULKAN_HPP_NAMESPACE::ImageSubresourceLayers srcSubresource = {};
  27615. VULKAN_HPP_NAMESPACE::Offset3D srcOffset = {};
  27616. VULKAN_HPP_NAMESPACE::ImageSubresourceLayers dstSubresource = {};
  27617. VULKAN_HPP_NAMESPACE::Offset3D dstOffset = {};
  27618. VULKAN_HPP_NAMESPACE::Extent3D extent = {};
  27619. };
  27620. static_assert( sizeof( ImageResolve2KHR ) == sizeof( VkImageResolve2KHR ), "struct and wrapper have different size!" );
  27621. static_assert( std::is_standard_layout<ImageResolve2KHR>::value, "struct wrapper is not a standard layout!" );
  27622. template <>
  27623. struct CppType<StructureType, StructureType::eImageResolve2KHR>
  27624. {
  27625. using Type = ImageResolve2KHR;
  27626. };
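// Editor's note - illustrative usage sketch, not generated code. ImageResolve2KHR is the
// extensible (sType/pNext) variant of ImageResolve used by VK_KHR_copy_commands2; sType is
// pre-initialised above, so only the payload members need to be filled. "extent" is again a
// placeholder.
//
//   vk::ImageResolve2KHR region2 = vk::ImageResolve2KHR()
//     .setSrcSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
//     .setDstSubresource( { vk::ImageAspectFlagBits::eColor, 0, 0, 1 } )
//     .setExtent( extent );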
  27627. struct ResolveImageInfo2KHR
  27628. {
  27629. static const bool allowDuplicate = false;
  27630. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eResolveImageInfo2KHR;
  27631. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  27632. VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR(VULKAN_HPP_NAMESPACE::Image srcImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, VULKAN_HPP_NAMESPACE::Image dstImage_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined, uint32_t regionCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
  27633. : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( regionCount_ ), pRegions( pRegions_ )
  27634. {}
  27635. VULKAN_HPP_CONSTEXPR ResolveImageInfo2KHR( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  27636. ResolveImageInfo2KHR( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  27637. : ResolveImageInfo2KHR( *reinterpret_cast<ResolveImageInfo2KHR const *>( &rhs ) )
  27638. {}
  27639. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  27640. ResolveImageInfo2KHR( VULKAN_HPP_NAMESPACE::Image srcImage_, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_, VULKAN_HPP_NAMESPACE::Image dstImage_, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2KHR> const & regions_ )
  27641. : srcImage( srcImage_ ), srcImageLayout( srcImageLayout_ ), dstImage( dstImage_ ), dstImageLayout( dstImageLayout_ ), regionCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
  27642. {}
  27643. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  27644. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  27645. VULKAN_HPP_CONSTEXPR_14 ResolveImageInfo2KHR & operator=( ResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  27646. ResolveImageInfo2KHR & operator=( VkResolveImageInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  27647. {
  27648. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR const *>( &rhs );
  27649. return *this;
  27650. }
  27651. ResolveImageInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  27652. {
  27653. pNext = pNext_;
  27654. return *this;
  27655. }
  27656. ResolveImageInfo2KHR & setSrcImage( VULKAN_HPP_NAMESPACE::Image srcImage_ ) VULKAN_HPP_NOEXCEPT
  27657. {
  27658. srcImage = srcImage_;
  27659. return *this;
  27660. }
  27661. ResolveImageInfo2KHR & setSrcImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout_ ) VULKAN_HPP_NOEXCEPT
  27662. {
  27663. srcImageLayout = srcImageLayout_;
  27664. return *this;
  27665. }
  27666. ResolveImageInfo2KHR & setDstImage( VULKAN_HPP_NAMESPACE::Image dstImage_ ) VULKAN_HPP_NOEXCEPT
  27667. {
  27668. dstImage = dstImage_;
  27669. return *this;
  27670. }
  27671. ResolveImageInfo2KHR & setDstImageLayout( VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout_ ) VULKAN_HPP_NOEXCEPT
  27672. {
  27673. dstImageLayout = dstImageLayout_;
  27674. return *this;
  27675. }
  27676. ResolveImageInfo2KHR & setRegionCount( uint32_t regionCount_ ) VULKAN_HPP_NOEXCEPT
  27677. {
  27678. regionCount = regionCount_;
  27679. return *this;
  27680. }
  27681. ResolveImageInfo2KHR & setPRegions( const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
  27682. {
  27683. pRegions = pRegions_;
  27684. return *this;
  27685. }
  27686. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  27687. ResolveImageInfo2KHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageResolve2KHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
  27688. {
  27689. regionCount = static_cast<uint32_t>( regions_.size() );
  27690. pRegions = regions_.data();
  27691. return *this;
  27692. }
  27693. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  27694. operator VkResolveImageInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
  27695. {
  27696. return *reinterpret_cast<const VkResolveImageInfo2KHR*>( this );
  27697. }
  27698. operator VkResolveImageInfo2KHR &() VULKAN_HPP_NOEXCEPT
  27699. {
  27700. return *reinterpret_cast<VkResolveImageInfo2KHR*>( this );
  27701. }
  27702. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  27703. auto operator<=>( ResolveImageInfo2KHR const& ) const = default;
  27704. #else
  27705. bool operator==( ResolveImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  27706. {
  27707. return ( sType == rhs.sType )
  27708. && ( pNext == rhs.pNext )
  27709. && ( srcImage == rhs.srcImage )
  27710. && ( srcImageLayout == rhs.srcImageLayout )
  27711. && ( dstImage == rhs.dstImage )
  27712. && ( dstImageLayout == rhs.dstImageLayout )
  27713. && ( regionCount == rhs.regionCount )
  27714. && ( pRegions == rhs.pRegions );
  27715. }
  27716. bool operator!=( ResolveImageInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  27717. {
  27718. return !operator==( rhs );
  27719. }
  27720. #endif
  27721. public:
  27722. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eResolveImageInfo2KHR;
  27723. const void* pNext = {};
  27724. VULKAN_HPP_NAMESPACE::Image srcImage = {};
  27725. VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  27726. VULKAN_HPP_NAMESPACE::Image dstImage = {};
  27727. VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
  27728. uint32_t regionCount = {};
  27729. const VULKAN_HPP_NAMESPACE::ImageResolve2KHR* pRegions = {};
  27730. };
  27731. static_assert( sizeof( ResolveImageInfo2KHR ) == sizeof( VkResolveImageInfo2KHR ), "struct and wrapper have different size!" );
  27732. static_assert( std::is_standard_layout<ResolveImageInfo2KHR>::value, "struct wrapper is not a standard layout!" );
  27733. template <>
  27734. struct CppType<StructureType, StructureType::eResolveImageInfo2KHR>
  27735. {
  27736. using Type = ResolveImageInfo2KHR;
  27737. };
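// Editor's note - illustrative usage sketch, not generated code. In enhanced mode the
// ArrayProxyNoTemporaries constructor fills regionCount/pRegions from a container, so the
// count cannot drift out of sync with the pointer. "srcImage", "dstImage" and "region2" are
// placeholders from the surrounding examples.
//
//   std::vector<vk::ImageResolve2KHR> regions = { region2 };
//   vk::ResolveImageInfo2KHR resolveInfo( srcImage, vk::ImageLayout::eTransferSrcOptimal,
//                                         dstImage, vk::ImageLayout::eTransferDstOptimal,
//                                         regions );
//   // later: commandBuffer.resolveImage2KHR( resolveInfo );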
  27738. struct PerformanceMarkerInfoINTEL
  27739. {
  27740. static const bool allowDuplicate = false;
  27741. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceMarkerInfoINTEL;
  27742. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  27743. VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL(uint64_t marker_ = {}) VULKAN_HPP_NOEXCEPT
  27744. : marker( marker_ )
  27745. {}
  27746. VULKAN_HPP_CONSTEXPR PerformanceMarkerInfoINTEL( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  27747. PerformanceMarkerInfoINTEL( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
  27748. : PerformanceMarkerInfoINTEL( *reinterpret_cast<PerformanceMarkerInfoINTEL const *>( &rhs ) )
  27749. {}
  27750. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  27751. VULKAN_HPP_CONSTEXPR_14 PerformanceMarkerInfoINTEL & operator=( PerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  27752. PerformanceMarkerInfoINTEL & operator=( VkPerformanceMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
  27753. {
  27754. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL const *>( &rhs );
  27755. return *this;
  27756. }
  27757. PerformanceMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  27758. {
  27759. pNext = pNext_;
  27760. return *this;
  27761. }
  27762. PerformanceMarkerInfoINTEL & setMarker( uint64_t marker_ ) VULKAN_HPP_NOEXCEPT
  27763. {
  27764. marker = marker_;
  27765. return *this;
  27766. }
  27767. operator VkPerformanceMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
  27768. {
  27769. return *reinterpret_cast<const VkPerformanceMarkerInfoINTEL*>( this );
  27770. }
  27771. operator VkPerformanceMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
  27772. {
  27773. return *reinterpret_cast<VkPerformanceMarkerInfoINTEL*>( this );
  27774. }
  27775. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  27776. auto operator<=>( PerformanceMarkerInfoINTEL const& ) const = default;
  27777. #else
  27778. bool operator==( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
  27779. {
  27780. return ( sType == rhs.sType )
  27781. && ( pNext == rhs.pNext )
  27782. && ( marker == rhs.marker );
  27783. }
  27784. bool operator!=( PerformanceMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
  27785. {
  27786. return !operator==( rhs );
  27787. }
  27788. #endif
  27789. public:
  27790. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceMarkerInfoINTEL;
  27791. const void* pNext = {};
  27792. uint64_t marker = {};
  27793. };
  27794. static_assert( sizeof( PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ), "struct and wrapper have different size!" );
  27795. static_assert( std::is_standard_layout<PerformanceMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
  27796. template <>
  27797. struct CppType<StructureType, StructureType::ePerformanceMarkerInfoINTEL>
  27798. {
  27799. using Type = PerformanceMarkerInfoINTEL;
  27800. };
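// Editor's note - illustrative usage sketch, not generated code. The 64-bit marker recorded
// here is reported back through VK_INTEL_performance_query results; "commandBuffer" is
// assumed to be in the recording state and the enhanced-mode overload is assumed.
//
//   commandBuffer.setPerformanceMarkerINTEL( vk::PerformanceMarkerInfoINTEL().setMarker( 0xC0FFEEu ) );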
  27801. struct PerformanceOverrideInfoINTEL
  27802. {
  27803. static const bool allowDuplicate = false;
  27804. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceOverrideInfoINTEL;
  27805. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  27806. VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL(VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware, VULKAN_HPP_NAMESPACE::Bool32 enable_ = {}, uint64_t parameter_ = {}) VULKAN_HPP_NOEXCEPT
  27807. : type( type_ ), enable( enable_ ), parameter( parameter_ )
  27808. {}
  27809. VULKAN_HPP_CONSTEXPR PerformanceOverrideInfoINTEL( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  27810. PerformanceOverrideInfoINTEL( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
  27811. : PerformanceOverrideInfoINTEL( *reinterpret_cast<PerformanceOverrideInfoINTEL const *>( &rhs ) )
  27812. {}
  27813. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  27814. VULKAN_HPP_CONSTEXPR_14 PerformanceOverrideInfoINTEL & operator=( PerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  27815. PerformanceOverrideInfoINTEL & operator=( VkPerformanceOverrideInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
  27816. {
  27817. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL const *>( &rhs );
  27818. return *this;
  27819. }
  27820. PerformanceOverrideInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  27821. {
  27822. pNext = pNext_;
  27823. return *this;
  27824. }
  27825. PerformanceOverrideInfoINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
  27826. {
  27827. type = type_;
  27828. return *this;
  27829. }
  27830. PerformanceOverrideInfoINTEL & setEnable( VULKAN_HPP_NAMESPACE::Bool32 enable_ ) VULKAN_HPP_NOEXCEPT
  27831. {
  27832. enable = enable_;
  27833. return *this;
  27834. }
  27835. PerformanceOverrideInfoINTEL & setParameter( uint64_t parameter_ ) VULKAN_HPP_NOEXCEPT
  27836. {
  27837. parameter = parameter_;
  27838. return *this;
  27839. }
  27840. operator VkPerformanceOverrideInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
  27841. {
  27842. return *reinterpret_cast<const VkPerformanceOverrideInfoINTEL*>( this );
  27843. }
  27844. operator VkPerformanceOverrideInfoINTEL &() VULKAN_HPP_NOEXCEPT
  27845. {
  27846. return *reinterpret_cast<VkPerformanceOverrideInfoINTEL*>( this );
  27847. }
  27848. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  27849. auto operator<=>( PerformanceOverrideInfoINTEL const& ) const = default;
  27850. #else
  27851. bool operator==( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
  27852. {
  27853. return ( sType == rhs.sType )
  27854. && ( pNext == rhs.pNext )
  27855. && ( type == rhs.type )
  27856. && ( enable == rhs.enable )
  27857. && ( parameter == rhs.parameter );
  27858. }
  27859. bool operator!=( PerformanceOverrideInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
  27860. {
  27861. return !operator==( rhs );
  27862. }
  27863. #endif
  27864. public:
  27865. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceOverrideInfoINTEL;
  27866. const void* pNext = {};
  27867. VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL::eNullHardware;
  27868. VULKAN_HPP_NAMESPACE::Bool32 enable = {};
  27869. uint64_t parameter = {};
  27870. };
  27871. static_assert( sizeof( PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ), "struct and wrapper have different size!" );
  27872. static_assert( std::is_standard_layout<PerformanceOverrideInfoINTEL>::value, "struct wrapper is not a standard layout!" );
  27873. template <>
  27874. struct CppType<StructureType, StructureType::ePerformanceOverrideInfoINTEL>
  27875. {
  27876. using Type = PerformanceOverrideInfoINTEL;
  27877. };
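// Editor's note - illustrative usage sketch, not generated code. An override either disables
// hardware (eNullHardware) or flushes/invalidates GPU caches (eFlushGpuCaches) while
// profiling; "commandBuffer" is a placeholder recording command buffer.
//
//   vk::PerformanceOverrideInfoINTEL overrideInfo( vk::PerformanceOverrideTypeINTEL::eFlushGpuCaches,
//                                                  VK_TRUE );   // parameter keeps its zero default
//   commandBuffer.setPerformanceOverrideINTEL( overrideInfo );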
  27878. struct PerformanceStreamMarkerInfoINTEL
  27879. {
  27880. static const bool allowDuplicate = false;
  27881. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceStreamMarkerInfoINTEL;
  27882. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  27883. VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL(uint32_t marker_ = {}) VULKAN_HPP_NOEXCEPT
  27884. : marker( marker_ )
  27885. {}
  27886. VULKAN_HPP_CONSTEXPR PerformanceStreamMarkerInfoINTEL( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  27887. PerformanceStreamMarkerInfoINTEL( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
  27888. : PerformanceStreamMarkerInfoINTEL( *reinterpret_cast<PerformanceStreamMarkerInfoINTEL const *>( &rhs ) )
  27889. {}
  27890. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  27891. VULKAN_HPP_CONSTEXPR_14 PerformanceStreamMarkerInfoINTEL & operator=( PerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  27892. PerformanceStreamMarkerInfoINTEL & operator=( VkPerformanceStreamMarkerInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
  27893. {
  27894. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL const *>( &rhs );
  27895. return *this;
  27896. }
  27897. PerformanceStreamMarkerInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  27898. {
  27899. pNext = pNext_;
  27900. return *this;
  27901. }
  27902. PerformanceStreamMarkerInfoINTEL & setMarker( uint32_t marker_ ) VULKAN_HPP_NOEXCEPT
  27903. {
  27904. marker = marker_;
  27905. return *this;
  27906. }
  27907. operator VkPerformanceStreamMarkerInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
  27908. {
  27909. return *reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL*>( this );
  27910. }
  27911. operator VkPerformanceStreamMarkerInfoINTEL &() VULKAN_HPP_NOEXCEPT
  27912. {
  27913. return *reinterpret_cast<VkPerformanceStreamMarkerInfoINTEL*>( this );
  27914. }
  27915. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  27916. auto operator<=>( PerformanceStreamMarkerInfoINTEL const& ) const = default;
  27917. #else
  27918. bool operator==( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
  27919. {
  27920. return ( sType == rhs.sType )
  27921. && ( pNext == rhs.pNext )
  27922. && ( marker == rhs.marker );
  27923. }
  27924. bool operator!=( PerformanceStreamMarkerInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
  27925. {
  27926. return !operator==( rhs );
  27927. }
  27928. #endif
  27929. public:
  27930. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceStreamMarkerInfoINTEL;
  27931. const void* pNext = {};
  27932. uint32_t marker = {};
  27933. };
  27934. static_assert( sizeof( PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ), "struct and wrapper have different size!" );
  27935. static_assert( std::is_standard_layout<PerformanceStreamMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
  27936. template <>
  27937. struct CppType<StructureType, StructureType::ePerformanceStreamMarkerInfoINTEL>
  27938. {
  27939. using Type = PerformanceStreamMarkerInfoINTEL;
  27940. };
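// Editor's note - illustrative usage sketch, not generated code. Stream markers carry a
// 32-bit value, unlike the 64-bit PerformanceMarkerInfoINTEL above; the marker value below
// is a placeholder.
//
//   commandBuffer.setPerformanceStreamMarkerINTEL( vk::PerformanceStreamMarkerInfoINTEL().setMarker( 42u ) );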
  27941. struct Viewport
  27942. {
  27943. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  27944. VULKAN_HPP_CONSTEXPR Viewport(float x_ = {}, float y_ = {}, float width_ = {}, float height_ = {}, float minDepth_ = {}, float maxDepth_ = {}) VULKAN_HPP_NOEXCEPT
  27945. : x( x_ ), y( y_ ), width( width_ ), height( height_ ), minDepth( minDepth_ ), maxDepth( maxDepth_ )
  27946. {}
  27947. VULKAN_HPP_CONSTEXPR Viewport( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  27948. Viewport( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
  27949. : Viewport( *reinterpret_cast<Viewport const *>( &rhs ) )
  27950. {}
  27951. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  27952. VULKAN_HPP_CONSTEXPR_14 Viewport & operator=( Viewport const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  27953. Viewport & operator=( VkViewport const & rhs ) VULKAN_HPP_NOEXCEPT
  27954. {
  27955. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Viewport const *>( &rhs );
  27956. return *this;
  27957. }
  27958. Viewport & setX( float x_ ) VULKAN_HPP_NOEXCEPT
  27959. {
  27960. x = x_;
  27961. return *this;
  27962. }
  27963. Viewport & setY( float y_ ) VULKAN_HPP_NOEXCEPT
  27964. {
  27965. y = y_;
  27966. return *this;
  27967. }
  27968. Viewport & setWidth( float width_ ) VULKAN_HPP_NOEXCEPT
  27969. {
  27970. width = width_;
  27971. return *this;
  27972. }
  27973. Viewport & setHeight( float height_ ) VULKAN_HPP_NOEXCEPT
  27974. {
  27975. height = height_;
  27976. return *this;
  27977. }
  27978. Viewport & setMinDepth( float minDepth_ ) VULKAN_HPP_NOEXCEPT
  27979. {
  27980. minDepth = minDepth_;
  27981. return *this;
  27982. }
  27983. Viewport & setMaxDepth( float maxDepth_ ) VULKAN_HPP_NOEXCEPT
  27984. {
  27985. maxDepth = maxDepth_;
  27986. return *this;
  27987. }
  27988. operator VkViewport const&() const VULKAN_HPP_NOEXCEPT
  27989. {
  27990. return *reinterpret_cast<const VkViewport*>( this );
  27991. }
  27992. operator VkViewport &() VULKAN_HPP_NOEXCEPT
  27993. {
  27994. return *reinterpret_cast<VkViewport*>( this );
  27995. }
  27996. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  27997. auto operator<=>( Viewport const& ) const = default;
  27998. #else
  27999. bool operator==( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
  28000. {
  28001. return ( x == rhs.x )
  28002. && ( y == rhs.y )
  28003. && ( width == rhs.width )
  28004. && ( height == rhs.height )
  28005. && ( minDepth == rhs.minDepth )
  28006. && ( maxDepth == rhs.maxDepth );
  28007. }
  28008. bool operator!=( Viewport const& rhs ) const VULKAN_HPP_NOEXCEPT
  28009. {
  28010. return !operator==( rhs );
  28011. }
  28012. #endif
  28013. public:
  28014. float x = {};
  28015. float y = {};
  28016. float width = {};
  28017. float height = {};
  28018. float minDepth = {};
  28019. float maxDepth = {};
  28020. };
  28021. static_assert( sizeof( Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
  28022. static_assert( std::is_standard_layout<Viewport>::value, "struct wrapper is not a standard layout!" );
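// Editor's note - illustrative usage sketch, not generated code. A viewport may be flipped
// vertically by giving it a negative height and a y offset equal to the framebuffer height
// (core 1.1 / VK_KHR_maintenance1 behaviour); "extent" and "commandBuffer" are placeholders.
//
//   vk::Viewport viewport( 0.0f, static_cast<float>( extent.height ),
//                          static_cast<float>( extent.width ), -static_cast<float>( extent.height ),
//                          0.0f, 1.0f );
//   commandBuffer.setViewport( 0, viewport );   // single element accepted via ArrayProxy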
  28023. struct ShadingRatePaletteNV
  28024. {
  28025. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  28026. VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV(uint32_t shadingRatePaletteEntryCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ = {}) VULKAN_HPP_NOEXCEPT
  28027. : shadingRatePaletteEntryCount( shadingRatePaletteEntryCount_ ), pShadingRatePaletteEntries( pShadingRatePaletteEntries_ )
  28028. {}
  28029. VULKAN_HPP_CONSTEXPR ShadingRatePaletteNV( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  28030. ShadingRatePaletteNV( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
  28031. : ShadingRatePaletteNV( *reinterpret_cast<ShadingRatePaletteNV const *>( &rhs ) )
  28032. {}
  28033. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  28034. ShadingRatePaletteNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ )
  28035. : shadingRatePaletteEntryCount( static_cast<uint32_t>( shadingRatePaletteEntries_.size() ) ), pShadingRatePaletteEntries( shadingRatePaletteEntries_.data() )
  28036. {}
  28037. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  28038. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  28039. VULKAN_HPP_CONSTEXPR_14 ShadingRatePaletteNV & operator=( ShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  28040. ShadingRatePaletteNV & operator=( VkShadingRatePaletteNV const & rhs ) VULKAN_HPP_NOEXCEPT
  28041. {
  28042. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV const *>( &rhs );
  28043. return *this;
  28044. }
  28045. ShadingRatePaletteNV & setShadingRatePaletteEntryCount( uint32_t shadingRatePaletteEntryCount_ ) VULKAN_HPP_NOEXCEPT
  28046. {
  28047. shadingRatePaletteEntryCount = shadingRatePaletteEntryCount_;
  28048. return *this;
  28049. }
  28050. ShadingRatePaletteNV & setPShadingRatePaletteEntries( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
  28051. {
  28052. pShadingRatePaletteEntries = pShadingRatePaletteEntries_;
  28053. return *this;
  28054. }
  28055. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  28056. ShadingRatePaletteNV & setShadingRatePaletteEntries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV> const & shadingRatePaletteEntries_ ) VULKAN_HPP_NOEXCEPT
  28057. {
  28058. shadingRatePaletteEntryCount = static_cast<uint32_t>( shadingRatePaletteEntries_.size() );
  28059. pShadingRatePaletteEntries = shadingRatePaletteEntries_.data();
  28060. return *this;
  28061. }
  28062. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  28063. operator VkShadingRatePaletteNV const&() const VULKAN_HPP_NOEXCEPT
  28064. {
  28065. return *reinterpret_cast<const VkShadingRatePaletteNV*>( this );
  28066. }
  28067. operator VkShadingRatePaletteNV &() VULKAN_HPP_NOEXCEPT
  28068. {
  28069. return *reinterpret_cast<VkShadingRatePaletteNV*>( this );
  28070. }
  28071. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  28072. auto operator<=>( ShadingRatePaletteNV const& ) const = default;
  28073. #else
  28074. bool operator==( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  28075. {
  28076. return ( shadingRatePaletteEntryCount == rhs.shadingRatePaletteEntryCount )
  28077. && ( pShadingRatePaletteEntries == rhs.pShadingRatePaletteEntries );
  28078. }
  28079. bool operator!=( ShadingRatePaletteNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  28080. {
  28081. return !operator==( rhs );
  28082. }
  28083. #endif
  28084. public:
  28085. uint32_t shadingRatePaletteEntryCount = {};
  28086. const VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV* pShadingRatePaletteEntries = {};
  28087. };
  28088. static_assert( sizeof( ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" );
  28089. static_assert( std::is_standard_layout<ShadingRatePaletteNV>::value, "struct wrapper is not a standard layout!" );
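// Editor's note - illustrative usage sketch, not generated code. The ArrayProxyNoTemporaries
// constructor keeps shadingRatePaletteEntryCount and pShadingRatePaletteEntries consistent;
// the two-entry palette below is a placeholder.
//
//   std::array<vk::ShadingRatePaletteEntryNV, 2> entries = {
//     vk::ShadingRatePaletteEntryNV::e1InvocationPerPixel,
//     vk::ShadingRatePaletteEntryNV::e1InvocationPer2X2Pixels
//   };
//   vk::ShadingRatePaletteNV palette( entries );
//   // later: commandBuffer.setViewportShadingRatePaletteNV( 0, palette );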
  28090. struct ViewportWScalingNV
  28091. {
  28092. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  28093. VULKAN_HPP_CONSTEXPR ViewportWScalingNV(float xcoeff_ = {}, float ycoeff_ = {}) VULKAN_HPP_NOEXCEPT
  28094. : xcoeff( xcoeff_ ), ycoeff( ycoeff_ )
  28095. {}
  28096. VULKAN_HPP_CONSTEXPR ViewportWScalingNV( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  28097. ViewportWScalingNV( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
  28098. : ViewportWScalingNV( *reinterpret_cast<ViewportWScalingNV const *>( &rhs ) )
  28099. {}
  28100. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  28101. VULKAN_HPP_CONSTEXPR_14 ViewportWScalingNV & operator=( ViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  28102. ViewportWScalingNV & operator=( VkViewportWScalingNV const & rhs ) VULKAN_HPP_NOEXCEPT
  28103. {
  28104. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportWScalingNV const *>( &rhs );
  28105. return *this;
  28106. }
  28107. ViewportWScalingNV & setXcoeff( float xcoeff_ ) VULKAN_HPP_NOEXCEPT
  28108. {
  28109. xcoeff = xcoeff_;
  28110. return *this;
  28111. }
  28112. ViewportWScalingNV & setYcoeff( float ycoeff_ ) VULKAN_HPP_NOEXCEPT
  28113. {
  28114. ycoeff = ycoeff_;
  28115. return *this;
  28116. }
  28117. operator VkViewportWScalingNV const&() const VULKAN_HPP_NOEXCEPT
  28118. {
  28119. return *reinterpret_cast<const VkViewportWScalingNV*>( this );
  28120. }
  28121. operator VkViewportWScalingNV &() VULKAN_HPP_NOEXCEPT
  28122. {
  28123. return *reinterpret_cast<VkViewportWScalingNV*>( this );
  28124. }
  28125. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  28126. auto operator<=>( ViewportWScalingNV const& ) const = default;
  28127. #else
  28128. bool operator==( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  28129. {
  28130. return ( xcoeff == rhs.xcoeff )
  28131. && ( ycoeff == rhs.ycoeff );
  28132. }
  28133. bool operator!=( ViewportWScalingNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  28134. {
  28135. return !operator==( rhs );
  28136. }
  28137. #endif
  28138. public:
  28139. float xcoeff = {};
  28140. float ycoeff = {};
  28141. };
  28142. static_assert( sizeof( ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
  28143. static_assert( std::is_standard_layout<ViewportWScalingNV>::value, "struct wrapper is not a standard layout!" );
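// Editor's note - illustrative usage sketch, not generated code. W-scaling coefficients are
// applied per viewport by VK_NV_clip_space_w_scaling; the coefficients and the command
// buffer are placeholders.
//
//   vk::ViewportWScalingNV wScaling( 2.0f, 2.0f );
//   commandBuffer.setViewportWScalingNV( 0, wScaling );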
  28144. struct StridedDeviceAddressRegionKHR
  28145. {
  28146. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  28147. VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize stride_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
  28148. : deviceAddress( deviceAddress_ ), stride( stride_ ), size( size_ )
  28149. {}
  28150. VULKAN_HPP_CONSTEXPR StridedDeviceAddressRegionKHR( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  28151. StridedDeviceAddressRegionKHR( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  28152. : StridedDeviceAddressRegionKHR( *reinterpret_cast<StridedDeviceAddressRegionKHR const *>( &rhs ) )
  28153. {}
  28154. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  28155. VULKAN_HPP_CONSTEXPR_14 StridedDeviceAddressRegionKHR & operator=( StridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  28156. StridedDeviceAddressRegionKHR & operator=( VkStridedDeviceAddressRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  28157. {
  28158. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR const *>( &rhs );
  28159. return *this;
  28160. }
  28161. StridedDeviceAddressRegionKHR & setDeviceAddress( VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ ) VULKAN_HPP_NOEXCEPT
  28162. {
  28163. deviceAddress = deviceAddress_;
  28164. return *this;
  28165. }
  28166. StridedDeviceAddressRegionKHR & setStride( VULKAN_HPP_NAMESPACE::DeviceSize stride_ ) VULKAN_HPP_NOEXCEPT
  28167. {
  28168. stride = stride_;
  28169. return *this;
  28170. }
  28171. StridedDeviceAddressRegionKHR & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
  28172. {
  28173. size = size_;
  28174. return *this;
  28175. }
  28176. operator VkStridedDeviceAddressRegionKHR const&() const VULKAN_HPP_NOEXCEPT
  28177. {
  28178. return *reinterpret_cast<const VkStridedDeviceAddressRegionKHR*>( this );
  28179. }
  28180. operator VkStridedDeviceAddressRegionKHR &() VULKAN_HPP_NOEXCEPT
  28181. {
  28182. return *reinterpret_cast<VkStridedDeviceAddressRegionKHR*>( this );
  28183. }
  28184. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  28185. auto operator<=>( StridedDeviceAddressRegionKHR const& ) const = default;
  28186. #else
  28187. bool operator==( StridedDeviceAddressRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  28188. {
  28189. return ( deviceAddress == rhs.deviceAddress )
  28190. && ( stride == rhs.stride )
  28191. && ( size == rhs.size );
  28192. }
  28193. bool operator!=( StridedDeviceAddressRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  28194. {
  28195. return !operator==( rhs );
  28196. }
  28197. #endif
  28198. public:
  28199. VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
  28200. VULKAN_HPP_NAMESPACE::DeviceSize stride = {};
  28201. VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  28202. };
  28203. static_assert( sizeof( StridedDeviceAddressRegionKHR ) == sizeof( VkStridedDeviceAddressRegionKHR ), "struct and wrapper have different size!" );
  28204. static_assert( std::is_standard_layout<StridedDeviceAddressRegionKHR>::value, "struct wrapper is not a standard layout!" );
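// Editor's note - illustrative usage sketch, not generated code. The four shader-binding-table
// regions handed to a ray tracing dispatch are described with this struct; "sbtAddress",
// "missOffset", "hitOffset" and "handleSizeAligned" are placeholders derived elsewhere from
// the pipeline's SBT layout.
//
//   vk::StridedDeviceAddressRegionKHR raygenRegion( sbtAddress, handleSizeAligned, handleSizeAligned );
//   vk::StridedDeviceAddressRegionKHR missRegion( sbtAddress + missOffset, handleSizeAligned, handleSizeAligned );
//   vk::StridedDeviceAddressRegionKHR hitRegion( sbtAddress + hitOffset, handleSizeAligned, handleSizeAligned );
//   vk::StridedDeviceAddressRegionKHR callableRegion{};   // an unused table may stay zero-initialised
//   // later: commandBuffer.traceRaysKHR( raygenRegion, missRegion, hitRegion, callableRegion, width, height, 1 );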
  28205. class CommandBuffer
  28206. {
  28207. public:
  28208. using CType = VkCommandBuffer;
  28209. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer;
  28210. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer;
  28211. public:
  28212. VULKAN_HPP_CONSTEXPR CommandBuffer() VULKAN_HPP_NOEXCEPT
  28213. : m_commandBuffer(VK_NULL_HANDLE)
  28214. {}
  28215. VULKAN_HPP_CONSTEXPR CommandBuffer( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  28216. : m_commandBuffer(VK_NULL_HANDLE)
  28217. {}
  28218. VULKAN_HPP_TYPESAFE_EXPLICIT CommandBuffer( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT
  28219. : m_commandBuffer( commandBuffer )
  28220. {}
  28221. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
28222. CommandBuffer & operator=( VkCommandBuffer commandBuffer ) VULKAN_HPP_NOEXCEPT
  28223. {
  28224. m_commandBuffer = commandBuffer;
  28225. return *this;
  28226. }
  28227. #endif
  28228. CommandBuffer & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  28229. {
  28230. m_commandBuffer = VK_NULL_HANDLE;
  28231. return *this;
  28232. }
  28233. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  28234. auto operator<=>( CommandBuffer const& ) const = default;
  28235. #else
  28236. bool operator==( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
  28237. {
  28238. return m_commandBuffer == rhs.m_commandBuffer;
  28239. }
28240. bool operator!=( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
  28241. {
  28242. return m_commandBuffer != rhs.m_commandBuffer;
  28243. }
28244. bool operator<( CommandBuffer const & rhs ) const VULKAN_HPP_NOEXCEPT
  28245. {
  28246. return m_commandBuffer < rhs.m_commandBuffer;
  28247. }
  28248. #endif
  28249. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28250. VULKAN_HPP_NODISCARD Result begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28251. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28252. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28253. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type begin( const CommandBufferBeginInfo & beginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  28254. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28255. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28256. void beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28257. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28258. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28259. void beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28260. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28261. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28262. void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28263. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28264. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28265. void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28266. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28267. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28268. void beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28269. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28270. void beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28271. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28272. void beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28273. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28274. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28275. void beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28276. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28277. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28278. void beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28279. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28280. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28281. void beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28282. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28283. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28284. void beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28285. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28286. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28287. void beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28288. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28289. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28290. void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28291. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28292. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28293. void beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
  28294. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28295. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28296. void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28297. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28298. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28299. void bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, ArrayProxy<const uint32_t> const & dynamicOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28300. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28301. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28302. void bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28303. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28304. void bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28305. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28306. void bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28307. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28308. void bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28309. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28310. void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28311. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28312. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28313. void bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
  28314. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28315. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28316. void bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28317. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28318. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28319. void bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
  28320. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28321. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28322. void bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize* pStrides, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28323. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28324. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28325. void bindVertexBuffers2EXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
  28326. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28327. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28328. void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28329. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28330. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28331. void blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28332. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28333. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28334. void blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR* pBlitImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28335. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28336. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28337. void blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28338. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28339. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28340. void buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28341. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28342. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28343. void buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28344. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28345. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28346. void buildAccelerationStructuresIndirectKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::DeviceAddress* pIndirectDeviceAddresses, const uint32_t* pIndirectStrides, const uint32_t* const * ppMaxPrimitiveCounts, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28347. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28348. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28349. void buildAccelerationStructuresIndirectKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses, ArrayProxy<const uint32_t> const & indirectStrides, ArrayProxy<const uint32_t* const > const & pMaxPrimitiveCounts, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
  28350. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28351. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28352. void buildAccelerationStructuresKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR* const * ppBuildRangeInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28353. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28354. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28355. void buildAccelerationStructuresKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR* const > const & pBuildRangeInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
  28356. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28357. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28358. void clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28359. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28360. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28361. void clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28362. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28363. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28364. void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28365. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28366. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28367. void clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28368. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28369. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28370. void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28371. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28372. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28373. void clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28374. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28375. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28376. void copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28377. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28378. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28379. void copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28380. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28381. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28382. void copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28383. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28384. void copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28385. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28386. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28387. void copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28388. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28389. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28390. void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28391. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28392. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28393. void copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28394. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
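// Usage sketch (illustrative): copying `dataSize` bytes from a staging buffer into a
// device-local buffer with the enhanced copyBuffer overload. `stagingBuffer`, `deviceBuffer`
// and `dataSize` are assumptions made for the example.
//
//   vk::BufferCopy region( /*srcOffset*/ 0, /*dstOffset*/ 0, dataSize );
//   cmd.copyBuffer( stagingBuffer, deviceBuffer, region );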
  28395. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28396. void copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR* pCopyBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28397. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28398. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28399. void copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28400. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28401. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28402. void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28403. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28404. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28405. void copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28406. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
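// Usage sketch (illustrative): uploading pixel data from a staging buffer into mip level 0 of
// a 2D color image. `width` and `height` are assumed; the image must already be in
// eTransferDstOptimal layout (see the pipelineBarrier sketch further below).
//
//   vk::BufferImageCopy region(
//       /*bufferOffset*/ 0, /*bufferRowLength*/ 0, /*bufferImageHeight*/ 0,
//       vk::ImageSubresourceLayers( vk::ImageAspectFlagBits::eColor, 0, 0, 1 ),
//       vk::Offset3D( 0, 0, 0 ),
//       vk::Extent3D( width, height, 1 ) );
//   cmd.copyBufferToImage( stagingBuffer, image, vk::ImageLayout::eTransferDstOptimal, region );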
  28407. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28408. void copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR* pCopyBufferToImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28409. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28410. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28411. void copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28412. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28413. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28414. void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28415. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28416. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28417. void copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28418. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28419. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28420. void copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR* pCopyImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28421. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28422. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28423. void copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28424. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28425. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28426. void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28427. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28428. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28429. void copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28430. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28431. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28432. void copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR* pCopyImageToBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28433. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28434. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28435. void copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28436. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28437. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28438. void copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28439. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28440. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28441. void copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28442. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28443. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28444. void copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28445. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28446. void debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28447. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28448. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28449. void debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28450. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28451. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28452. void debugMarkerEndEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28453. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28454. void debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28455. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28456. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28457. void debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28458. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28459. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28460. void dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
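// Usage sketch (illustrative): launching a compute workload sized for a 1920x1080 image with
// 16x16 local workgroups. The compute pipeline and its descriptor sets are assumed to have
// been bound beforehand with bindPipeline / bindDescriptorSets.
//
//   uint32_t groupsX = ( 1920 + 15 ) / 16;
//   uint32_t groupsY = ( 1080 + 15 ) / 16;
//   cmd.dispatch( groupsX, groupsY, 1 );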
  28461. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28462. void dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28463. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28464. void dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28465. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28466. void dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28467. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28468. void draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28469. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28470. void drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
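// Usage sketch (illustrative): a typical indexed draw of a mesh with `indexCount` indices and
// a single instance. Vertex and index buffers are assumed to have been bound earlier with
// bindVertexBuffers / bindIndexBuffer.
//
//   cmd.drawIndexed( indexCount, /*instanceCount*/ 1, /*firstIndex*/ 0,
//                    /*vertexOffset*/ 0, /*firstInstance*/ 0 );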
  28471. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28472. void drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28473. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28474. void drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28475. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28476. void drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28477. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28478. void drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28479. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28480. void drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28481. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28482. void drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28483. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28484. void drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28485. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28486. void drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28487. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28488. void drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28489. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28490. void drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28491. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28492. void drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28493. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28494. void drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28495. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28496. void endConditionalRenderingEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28497. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28498. void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28499. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28500. void endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28501. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28502. void endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28503. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28504. void endRenderPass( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28505. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28506. void endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28507. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28508. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28509. void endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28510. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28511. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28512. void endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28513. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28514. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28515. void endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28516. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28517. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28518. void endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28519. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28520. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28521. void endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
  28522. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28523. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28524. void executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28525. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28526. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28527. void executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28528. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
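// Usage sketch (illustrative): executing pre-recorded secondary command buffers from a
// primary one. `secondaries` is assumed to be a std::vector<vk::CommandBuffer> recorded at
// the secondary level; the ArrayProxy overload above accepts the vector directly.
//
//   cmd.executeCommands( secondaries );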
  28529. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28530. void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28531. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28532. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28533. void executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28534. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28535. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28536. void fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28537. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28538. void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28539. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28540. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28541. void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28542. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28543. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28544. void nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28545. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28546. void nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28547. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28548. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28549. void nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28550. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28551. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28552. void nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28553. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28554. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28555. void nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28556. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28557. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28558. void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28559. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28560. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28561. void pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28562. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
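// Usage sketch (illustrative): an image layout transition from eUndefined to
// eTransferDstOptimal before a transfer write, using the enhanced pipelineBarrier overload.
// `image` is an assumption; the empty braces pass no dependency flags and no memory/buffer
// barriers.
//
//   vk::ImageMemoryBarrier barrier(
//       /*srcAccessMask*/ {},
//       /*dstAccessMask*/ vk::AccessFlagBits::eTransferWrite,
//       vk::ImageLayout::eUndefined,
//       vk::ImageLayout::eTransferDstOptimal,
//       VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED,
//       image,
//       vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );
//   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe,
//                        vk::PipelineStageFlagBits::eTransfer,
//                        {}, {}, {}, barrier );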
  28563. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28564. void preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28565. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28566. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28567. void preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28568. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28569. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28570. void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28571. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28572. template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28573. void pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> const & values, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28574. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
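// Usage sketch (illustrative): pushing a small struct of shader constants with the templated
// ArrayProxy overload above; the byte size is derived from the element type. `pipelineLayout`
// and the PushConstants struct are assumptions made for the example.
//
//   struct PushConstants { float time; uint32_t frameIndex; } pc{ t, frame };
//   cmd.pushConstants<PushConstants>( pipelineLayout,
//                                     vk::ShaderStageFlagBits::eVertex,
//                                     /*offset*/ 0,
//                                     pc );  // ArrayProxy<const T> from a single element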
  28575. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28576. void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28577. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28578. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28579. void pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28580. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28581. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28582. void pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28583. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28584. void resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28585. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28586. void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28587. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28588. void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28589. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28590. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28591. void resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28592. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28593. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28594. void resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR* pResolveImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28595. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28596. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28597. void resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28598. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28599. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28600. void setBlendConstants( const float blendConstants[4], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28601. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28602. void setCheckpointNV( const void* pCheckpointMarker, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28603. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28604. void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28605. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28606. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28607. void setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28608. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28609. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28610. void setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28611. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28612. void setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28613. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28614. void setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28615. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28616. void setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28617. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28618. void setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28619. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28620. void setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28621. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28622. void setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
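// Usage sketch (illustrative): the EXT-suffixed setters in this group come from
// VK_EXT_extended_dynamic_state and override pipeline state at record time. They may only be
// used when the extension is enabled and the bound pipeline declared the corresponding
// dynamic states.
//
//   cmd.setCullModeEXT( vk::CullModeFlagBits::eBack );
//   cmd.setDepthTestEnableEXT( VK_TRUE );
//   cmd.setDepthWriteEnableEXT( VK_TRUE );
//   cmd.setDepthCompareOpEXT( vk::CompareOp::eLessOrEqual );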
  28623. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28624. void setDeviceMask( uint32_t deviceMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28625. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28626. void setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28627. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28628. void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28629. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28630. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28631. void setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28632. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28633. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28634. void setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28635. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28636. void setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28637. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28638. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28639. void setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28640. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28641. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28642. void setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28643. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28644. void setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D* pFragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28645. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28646. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28647. void setFragmentShadingRateKHR( const Extent2D & fragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28648. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28649. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28650. void setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28651. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28652. void setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28653. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28654. void setLineWidth( float lineWidth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28655. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28656. VULKAN_HPP_NODISCARD Result setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28657. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28658. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28659. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  28660. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28661. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28662. VULKAN_HPP_NODISCARD Result setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28663. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28664. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28665. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  28666. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28667. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28668. VULKAN_HPP_NODISCARD Result setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28669. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28670. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28671. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  28672. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28673. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28674. void setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28675. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28676. void setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28677. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28678. void setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28679. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28680. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28681. void setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28682. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28683. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28684. void setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28685. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28686. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28687. void setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28688. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28689. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28690. void setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28691. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28692. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28693. void setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28694. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28695. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28696. void setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28697. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28698. void setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28699. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28700. void setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28701. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28702. void setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28703. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28704. void setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28705. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28706. void setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28707. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28708. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28709. void setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28710. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
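// Usage sketch (illustrative): setting a full-framebuffer dynamic viewport and scissor.
// `extent` is assumed to be the swapchain extent; both calls use the single-element
// ArrayProxy overloads declared above.
//
//   vk::Viewport viewport( 0.0f, 0.0f,
//                          static_cast<float>( extent.width ),
//                          static_cast<float>( extent.height ),
//                          0.0f, 1.0f );
//   cmd.setViewport( 0, viewport );
//   cmd.setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), extent ) );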
  28711. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28712. void setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28713. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28714. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28715. void setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28716. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28717. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28718. void setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28719. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28720. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28721. void setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28722. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28723. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28724. void setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28725. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28726. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28727. void setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28728. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28729. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28730. void traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28731. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28732. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28733. void traceRaysIndirectKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const StridedDeviceAddressRegionKHR & missShaderBindingTable, const StridedDeviceAddressRegionKHR & hitShaderBindingTable, const StridedDeviceAddressRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28734. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28735. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28736. void traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28737. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28738. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28739. void traceRaysKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const StridedDeviceAddressRegionKHR & missShaderBindingTable, const StridedDeviceAddressRegionKHR & hitShaderBindingTable, const StridedDeviceAddressRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28740. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
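// Usage sketch (illustrative): dispatching rays with VK_KHR_ray_tracing_pipeline. The shader
// binding table regions here are assumptions — `raygenAddr`, `missAddr`, `hitAddr` and
// `handleStride` would come from the application's SBT buffer; an empty region can be passed
// when no callable shaders are used.
//
//   vk::StridedDeviceAddressRegionKHR raygenSbt( raygenAddr, handleStride, handleStride );
//   vk::StridedDeviceAddressRegionKHR missSbt( missAddr, handleStride, handleStride );
//   vk::StridedDeviceAddressRegionKHR hitSbt( hitAddr, handleStride, handleStride );
//   vk::StridedDeviceAddressRegionKHR callableSbt{};
//   cmd.traceRaysKHR( raygenSbt, missSbt, hitSbt, callableSbt, width, height, /*depth*/ 1 );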
  28741. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28742. void traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28743. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28744. void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28745. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28746. template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28747. void updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> const & data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28748. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
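// Usage sketch (illustrative): writing a small amount of host data into a buffer directly
// from the command stream with the templated overload above. Per the Vulkan specification the
// total size must be at most 65536 bytes and a multiple of 4. `dstBuffer`, `magic` and
// `version` are assumptions for the example.
//
//   std::array<uint32_t, 4> header = { magic, version, 0, 0 };
//   cmd.updateBuffer<uint32_t>( dstBuffer, /*dstOffset*/ 0, header );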
  28749. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28750. void waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28751. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28752. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28753. void waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28754. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28755. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28756. void writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28757. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28758. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28759. void writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28760. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28761. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28762. void writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28763. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28764. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28765. void writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28766. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28767. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28768. void writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28769. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28770. void writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28771. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28772. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28773. VULKAN_HPP_NODISCARD Result end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28774. #else
  28775. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28776. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type end( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  28777. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  28778. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  28779. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28780. VULKAN_HPP_NODISCARD Result reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  28781. #else
  28782. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  28783. typename ResultValueType<void>::type reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  28784. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
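// Usage sketch (illustrative): finishing and recycling a command buffer. In enhanced mode
// with exceptions enabled, end() and reset() return void and throw a vk::SystemError on
// failure; with VULKAN_HPP_DISABLE_ENHANCED_MODE they return the raw vk::Result instead.
//
//   cmd.end();
//   // ... submit the command buffer and wait for it to complete ...
//   cmd.reset( vk::CommandBufferResetFlagBits::eReleaseResources );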
  28785. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkCommandBuffer() const VULKAN_HPP_NOEXCEPT
  28786. {
  28787. return m_commandBuffer;
  28788. }
  28789. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  28790. {
  28791. return m_commandBuffer != VK_NULL_HANDLE;
  28792. }
  28793. bool operator!() const VULKAN_HPP_NOEXCEPT
  28794. {
  28795. return m_commandBuffer == VK_NULL_HANDLE;
  28796. }
  28797. private:
  28798. VkCommandBuffer m_commandBuffer;
  28799. };
  28800. static_assert( sizeof( VULKAN_HPP_NAMESPACE::CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
  28801. template <>
  28802. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eCommandBuffer>
  28803. {
  28804. using type = VULKAN_HPP_NAMESPACE::CommandBuffer;
  28805. };
  28806. template <>
  28807. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eCommandBuffer>
  28808. {
  28809. using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
  28810. };
  28811. template <>
  28812. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eCommandBuffer>
  28813. {
  28814. using Type = VULKAN_HPP_NAMESPACE::CommandBuffer;
  28815. };
  28816. template <>
  28817. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::CommandBuffer>
  28818. {
  28819. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  28820. };
  28821. struct MemoryAllocateInfo
  28822. {
  28823. static const bool allowDuplicate = false;
  28824. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateInfo;
  28825. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  28826. VULKAN_HPP_CONSTEXPR MemoryAllocateInfo(VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ = {}, uint32_t memoryTypeIndex_ = {}) VULKAN_HPP_NOEXCEPT
  28827. : allocationSize( allocationSize_ ), memoryTypeIndex( memoryTypeIndex_ )
  28828. {}
  28829. VULKAN_HPP_CONSTEXPR MemoryAllocateInfo( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  28830. MemoryAllocateInfo( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  28831. : MemoryAllocateInfo( *reinterpret_cast<MemoryAllocateInfo const *>( &rhs ) )
  28832. {}
  28833. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  28834. VULKAN_HPP_CONSTEXPR_14 MemoryAllocateInfo & operator=( MemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  28835. MemoryAllocateInfo & operator=( VkMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  28836. {
  28837. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo const *>( &rhs );
  28838. return *this;
  28839. }
  28840. MemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  28841. {
  28842. pNext = pNext_;
  28843. return *this;
  28844. }
  28845. MemoryAllocateInfo & setAllocationSize( VULKAN_HPP_NAMESPACE::DeviceSize allocationSize_ ) VULKAN_HPP_NOEXCEPT
  28846. {
  28847. allocationSize = allocationSize_;
  28848. return *this;
  28849. }
  28850. MemoryAllocateInfo & setMemoryTypeIndex( uint32_t memoryTypeIndex_ ) VULKAN_HPP_NOEXCEPT
  28851. {
  28852. memoryTypeIndex = memoryTypeIndex_;
  28853. return *this;
  28854. }
  28855. operator VkMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
  28856. {
  28857. return *reinterpret_cast<const VkMemoryAllocateInfo*>( this );
  28858. }
  28859. operator VkMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
  28860. {
  28861. return *reinterpret_cast<VkMemoryAllocateInfo*>( this );
  28862. }
  28863. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  28864. auto operator<=>( MemoryAllocateInfo const& ) const = default;
  28865. #else
  28866. bool operator==( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  28867. {
  28868. return ( sType == rhs.sType )
  28869. && ( pNext == rhs.pNext )
  28870. && ( allocationSize == rhs.allocationSize )
  28871. && ( memoryTypeIndex == rhs.memoryTypeIndex );
  28872. }
  28873. bool operator!=( MemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  28874. {
  28875. return !operator==( rhs );
  28876. }
  28877. #endif
  28878. public:
  28879. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateInfo;
  28880. const void* pNext = {};
  28881. VULKAN_HPP_NAMESPACE::DeviceSize allocationSize = {};
  28882. uint32_t memoryTypeIndex = {};
  28883. };
  28884. static_assert( sizeof( MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" );
  28885. static_assert( std::is_standard_layout<MemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
  28886. template <>
  28887. struct CppType<StructureType, StructureType::eMemoryAllocateInfo>
  28888. {
  28889. using Type = MemoryAllocateInfo;
  28890. };
  28891. class DeferredOperationKHR
  28892. {
  28893. public:
  28894. using CType = VkDeferredOperationKHR;
  28895. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR;
  28896. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
  28897. public:
  28898. VULKAN_HPP_CONSTEXPR DeferredOperationKHR() VULKAN_HPP_NOEXCEPT
  28899. : m_deferredOperationKHR(VK_NULL_HANDLE)
  28900. {}
  28901. VULKAN_HPP_CONSTEXPR DeferredOperationKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  28902. : m_deferredOperationKHR(VK_NULL_HANDLE)
  28903. {}
  28904. VULKAN_HPP_TYPESAFE_EXPLICIT DeferredOperationKHR( VkDeferredOperationKHR deferredOperationKHR ) VULKAN_HPP_NOEXCEPT
  28905. : m_deferredOperationKHR( deferredOperationKHR )
  28906. {}
  28907. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  28908. DeferredOperationKHR & operator=(VkDeferredOperationKHR deferredOperationKHR) VULKAN_HPP_NOEXCEPT
  28909. {
  28910. m_deferredOperationKHR = deferredOperationKHR;
  28911. return *this;
  28912. }
  28913. #endif
  28914. DeferredOperationKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  28915. {
  28916. m_deferredOperationKHR = VK_NULL_HANDLE;
  28917. return *this;
  28918. }
  28919. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  28920. auto operator<=>( DeferredOperationKHR const& ) const = default;
  28921. #else
  28922. bool operator==( DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
  28923. {
  28924. return m_deferredOperationKHR == rhs.m_deferredOperationKHR;
  28925. }
  28926. bool operator!=(DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
  28927. {
  28928. return m_deferredOperationKHR != rhs.m_deferredOperationKHR;
  28929. }
  28930. bool operator<(DeferredOperationKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
  28931. {
  28932. return m_deferredOperationKHR < rhs.m_deferredOperationKHR;
  28933. }
  28934. #endif
  28935. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDeferredOperationKHR() const VULKAN_HPP_NOEXCEPT
  28936. {
  28937. return m_deferredOperationKHR;
  28938. }
  28939. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  28940. {
  28941. return m_deferredOperationKHR != VK_NULL_HANDLE;
  28942. }
  28943. bool operator!() const VULKAN_HPP_NOEXCEPT
  28944. {
  28945. return m_deferredOperationKHR == VK_NULL_HANDLE;
  28946. }
  28947. private:
  28948. VkDeferredOperationKHR m_deferredOperationKHR;
  28949. };
  28950. static_assert( sizeof( VULKAN_HPP_NAMESPACE::DeferredOperationKHR ) == sizeof( VkDeferredOperationKHR ), "handle and wrapper have different size!" );
  28951. template <>
  28952. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDeferredOperationKHR>
  28953. {
  28954. using type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR;
  28955. };
  28956. template <>
  28957. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDeferredOperationKHR>
  28958. {
  28959. using Type = VULKAN_HPP_NAMESPACE::DeferredOperationKHR;
  28960. };
  28961. template <>
  28962. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>
  28963. {
  28964. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  28965. };
  28966. class PipelineCache
  28967. {
  28968. public:
  28969. using CType = VkPipelineCache;
  28970. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache;
  28971. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache;
  28972. public:
  28973. VULKAN_HPP_CONSTEXPR PipelineCache() VULKAN_HPP_NOEXCEPT
  28974. : m_pipelineCache(VK_NULL_HANDLE)
  28975. {}
  28976. VULKAN_HPP_CONSTEXPR PipelineCache( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  28977. : m_pipelineCache(VK_NULL_HANDLE)
  28978. {}
  28979. VULKAN_HPP_TYPESAFE_EXPLICIT PipelineCache( VkPipelineCache pipelineCache ) VULKAN_HPP_NOEXCEPT
  28980. : m_pipelineCache( pipelineCache )
  28981. {}
  28982. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  28983. PipelineCache & operator=(VkPipelineCache pipelineCache) VULKAN_HPP_NOEXCEPT
  28984. {
  28985. m_pipelineCache = pipelineCache;
  28986. return *this;
  28987. }
  28988. #endif
  28989. PipelineCache & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  28990. {
  28991. m_pipelineCache = VK_NULL_HANDLE;
  28992. return *this;
  28993. }
  28994. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  28995. auto operator<=>( PipelineCache const& ) const = default;
  28996. #else
  28997. bool operator==( PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
  28998. {
  28999. return m_pipelineCache == rhs.m_pipelineCache;
  29000. }
  29001. bool operator!=(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
  29002. {
  29003. return m_pipelineCache != rhs.m_pipelineCache;
  29004. }
  29005. bool operator<(PipelineCache const & rhs ) const VULKAN_HPP_NOEXCEPT
  29006. {
  29007. return m_pipelineCache < rhs.m_pipelineCache;
  29008. }
  29009. #endif
  29010. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPipelineCache() const VULKAN_HPP_NOEXCEPT
  29011. {
  29012. return m_pipelineCache;
  29013. }
  29014. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  29015. {
  29016. return m_pipelineCache != VK_NULL_HANDLE;
  29017. }
  29018. bool operator!() const VULKAN_HPP_NOEXCEPT
  29019. {
  29020. return m_pipelineCache == VK_NULL_HANDLE;
  29021. }
  29022. private:
  29023. VkPipelineCache m_pipelineCache;
  29024. };
  29025. static_assert( sizeof( VULKAN_HPP_NAMESPACE::PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" );
  29026. template <>
  29027. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePipelineCache>
  29028. {
  29029. using type = VULKAN_HPP_NAMESPACE::PipelineCache;
  29030. };
  29031. template <>
  29032. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePipelineCache>
  29033. {
  29034. using Type = VULKAN_HPP_NAMESPACE::PipelineCache;
  29035. };
  29036. template <>
  29037. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePipelineCache>
  29038. {
  29039. using Type = VULKAN_HPP_NAMESPACE::PipelineCache;
  29040. };
  29041. template <>
  29042. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PipelineCache>
  29043. {
  29044. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  29045. };
  29046. struct EventCreateInfo
  29047. {
  29048. static const bool allowDuplicate = false;
  29049. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eEventCreateInfo;
  29050. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29051. VULKAN_HPP_CONSTEXPR EventCreateInfo(VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
  29052. : flags( flags_ )
  29053. {}
  29054. VULKAN_HPP_CONSTEXPR EventCreateInfo( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29055. EventCreateInfo( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  29056. : EventCreateInfo( *reinterpret_cast<EventCreateInfo const *>( &rhs ) )
  29057. {}
  29058. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29059. VULKAN_HPP_CONSTEXPR_14 EventCreateInfo & operator=( EventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29060. EventCreateInfo & operator=( VkEventCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  29061. {
  29062. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::EventCreateInfo const *>( &rhs );
  29063. return *this;
  29064. }
  29065. EventCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  29066. {
  29067. pNext = pNext_;
  29068. return *this;
  29069. }
  29070. EventCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::EventCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  29071. {
  29072. flags = flags_;
  29073. return *this;
  29074. }
  29075. operator VkEventCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  29076. {
  29077. return *reinterpret_cast<const VkEventCreateInfo*>( this );
  29078. }
  29079. operator VkEventCreateInfo &() VULKAN_HPP_NOEXCEPT
  29080. {
  29081. return *reinterpret_cast<VkEventCreateInfo*>( this );
  29082. }
  29083. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  29084. auto operator<=>( EventCreateInfo const& ) const = default;
  29085. #else
  29086. bool operator==( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  29087. {
  29088. return ( sType == rhs.sType )
  29089. && ( pNext == rhs.pNext )
  29090. && ( flags == rhs.flags );
  29091. }
  29092. bool operator!=( EventCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  29093. {
  29094. return !operator==( rhs );
  29095. }
  29096. #endif
  29097. public:
  29098. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eEventCreateInfo;
  29099. const void* pNext = {};
  29100. VULKAN_HPP_NAMESPACE::EventCreateFlags flags = {};
  29101. };
  29102. static_assert( sizeof( EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" );
  29103. static_assert( std::is_standard_layout<EventCreateInfo>::value, "struct wrapper is not a standard layout!" );
  29104. template <>
  29105. struct CppType<StructureType, StructureType::eEventCreateInfo>
  29106. {
  29107. using Type = EventCreateInfo;
  29108. };
  29109. struct FenceCreateInfo
  29110. {
  29111. static const bool allowDuplicate = false;
  29112. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceCreateInfo;
  29113. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29114. VULKAN_HPP_CONSTEXPR FenceCreateInfo(VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
  29115. : flags( flags_ )
  29116. {}
  29117. VULKAN_HPP_CONSTEXPR FenceCreateInfo( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29118. FenceCreateInfo( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  29119. : FenceCreateInfo( *reinterpret_cast<FenceCreateInfo const *>( &rhs ) )
  29120. {}
  29121. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29122. VULKAN_HPP_CONSTEXPR_14 FenceCreateInfo & operator=( FenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29123. FenceCreateInfo & operator=( VkFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  29124. {
  29125. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceCreateInfo const *>( &rhs );
  29126. return *this;
  29127. }
  29128. FenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  29129. {
  29130. pNext = pNext_;
  29131. return *this;
  29132. }
  29133. FenceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FenceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  29134. {
  29135. flags = flags_;
  29136. return *this;
  29137. }
  29138. operator VkFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  29139. {
  29140. return *reinterpret_cast<const VkFenceCreateInfo*>( this );
  29141. }
  29142. operator VkFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
  29143. {
  29144. return *reinterpret_cast<VkFenceCreateInfo*>( this );
  29145. }
  29146. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  29147. auto operator<=>( FenceCreateInfo const& ) const = default;
  29148. #else
  29149. bool operator==( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  29150. {
  29151. return ( sType == rhs.sType )
  29152. && ( pNext == rhs.pNext )
  29153. && ( flags == rhs.flags );
  29154. }
  29155. bool operator!=( FenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  29156. {
  29157. return !operator==( rhs );
  29158. }
  29159. #endif
  29160. public:
  29161. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceCreateInfo;
  29162. const void* pNext = {};
  29163. VULKAN_HPP_NAMESPACE::FenceCreateFlags flags = {};
  29164. };
  29165. static_assert( sizeof( FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
  29166. static_assert( std::is_standard_layout<FenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
  29167. template <>
  29168. struct CppType<StructureType, StructureType::eFenceCreateInfo>
  29169. {
  29170. using Type = FenceCreateInfo;
  29171. };
  29172. struct FramebufferCreateInfo
  29173. {
  29174. static const bool allowDuplicate = false;
  29175. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferCreateInfo;
  29176. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29177. VULKAN_HPP_CONSTEXPR FramebufferCreateInfo(VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layers_ = {}) VULKAN_HPP_NOEXCEPT
  29178. : flags( flags_ ), renderPass( renderPass_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), width( width_ ), height( height_ ), layers( layers_ )
  29179. {}
  29180. VULKAN_HPP_CONSTEXPR FramebufferCreateInfo( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29181. FramebufferCreateInfo( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  29182. : FramebufferCreateInfo( *reinterpret_cast<FramebufferCreateInfo const *>( &rhs ) )
  29183. {}
  29184. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  29185. FramebufferCreateInfo( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_, VULKAN_HPP_NAMESPACE::RenderPass renderPass_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layers_ = {} )
  29186. : flags( flags_ ), renderPass( renderPass_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), width( width_ ), height( height_ ), layers( layers_ )
  29187. {}
  29188. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  29189. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29190. VULKAN_HPP_CONSTEXPR_14 FramebufferCreateInfo & operator=( FramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29191. FramebufferCreateInfo & operator=( VkFramebufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  29192. {
  29193. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo const *>( &rhs );
  29194. return *this;
  29195. }
  29196. FramebufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  29197. {
  29198. pNext = pNext_;
  29199. return *this;
  29200. }
  29201. FramebufferCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  29202. {
  29203. flags = flags_;
  29204. return *this;
  29205. }
  29206. FramebufferCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
  29207. {
  29208. renderPass = renderPass_;
  29209. return *this;
  29210. }
  29211. FramebufferCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
  29212. {
  29213. attachmentCount = attachmentCount_;
  29214. return *this;
  29215. }
  29216. FramebufferCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT
  29217. {
  29218. pAttachments = pAttachments_;
  29219. return *this;
  29220. }
  29221. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  29222. FramebufferCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ ) VULKAN_HPP_NOEXCEPT
  29223. {
  29224. attachmentCount = static_cast<uint32_t>( attachments_.size() );
  29225. pAttachments = attachments_.data();
  29226. return *this;
  29227. }
  29228. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  29229. FramebufferCreateInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
  29230. {
  29231. width = width_;
  29232. return *this;
  29233. }
  29234. FramebufferCreateInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
  29235. {
  29236. height = height_;
  29237. return *this;
  29238. }
  29239. FramebufferCreateInfo & setLayers( uint32_t layers_ ) VULKAN_HPP_NOEXCEPT
  29240. {
  29241. layers = layers_;
  29242. return *this;
  29243. }
  29244. operator VkFramebufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  29245. {
  29246. return *reinterpret_cast<const VkFramebufferCreateInfo*>( this );
  29247. }
  29248. operator VkFramebufferCreateInfo &() VULKAN_HPP_NOEXCEPT
  29249. {
  29250. return *reinterpret_cast<VkFramebufferCreateInfo*>( this );
  29251. }
  29252. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  29253. auto operator<=>( FramebufferCreateInfo const& ) const = default;
  29254. #else
  29255. bool operator==( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  29256. {
  29257. return ( sType == rhs.sType )
  29258. && ( pNext == rhs.pNext )
  29259. && ( flags == rhs.flags )
  29260. && ( renderPass == rhs.renderPass )
  29261. && ( attachmentCount == rhs.attachmentCount )
  29262. && ( pAttachments == rhs.pAttachments )
  29263. && ( width == rhs.width )
  29264. && ( height == rhs.height )
  29265. && ( layers == rhs.layers );
  29266. }
  29267. bool operator!=( FramebufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  29268. {
  29269. return !operator==( rhs );
  29270. }
  29271. #endif
  29272. public:
  29273. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferCreateInfo;
  29274. const void* pNext = {};
  29275. VULKAN_HPP_NAMESPACE::FramebufferCreateFlags flags = {};
  29276. VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
  29277. uint32_t attachmentCount = {};
  29278. const VULKAN_HPP_NAMESPACE::ImageView* pAttachments = {};
  29279. uint32_t width = {};
  29280. uint32_t height = {};
  29281. uint32_t layers = {};
  29282. };
  29283. static_assert( sizeof( FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ), "struct and wrapper have different size!" );
  29284. static_assert( std::is_standard_layout<FramebufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
  29285. template <>
  29286. struct CppType<StructureType, StructureType::eFramebufferCreateInfo>
  29287. {
  29288. using Type = FramebufferCreateInfo;
  29289. };
  29290. struct VertexInputBindingDescription
  29291. {
  29292. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29293. VULKAN_HPP_CONSTEXPR VertexInputBindingDescription(uint32_t binding_ = {}, uint32_t stride_ = {}, VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex) VULKAN_HPP_NOEXCEPT
  29294. : binding( binding_ ), stride( stride_ ), inputRate( inputRate_ )
  29295. {}
  29296. VULKAN_HPP_CONSTEXPR VertexInputBindingDescription( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29297. VertexInputBindingDescription( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
  29298. : VertexInputBindingDescription( *reinterpret_cast<VertexInputBindingDescription const *>( &rhs ) )
  29299. {}
  29300. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29301. VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDescription & operator=( VertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29302. VertexInputBindingDescription & operator=( VkVertexInputBindingDescription const & rhs ) VULKAN_HPP_NOEXCEPT
  29303. {
  29304. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription const *>( &rhs );
  29305. return *this;
  29306. }
  29307. VertexInputBindingDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
  29308. {
  29309. binding = binding_;
  29310. return *this;
  29311. }
  29312. VertexInputBindingDescription & setStride( uint32_t stride_ ) VULKAN_HPP_NOEXCEPT
  29313. {
  29314. stride = stride_;
  29315. return *this;
  29316. }
  29317. VertexInputBindingDescription & setInputRate( VULKAN_HPP_NAMESPACE::VertexInputRate inputRate_ ) VULKAN_HPP_NOEXCEPT
  29318. {
  29319. inputRate = inputRate_;
  29320. return *this;
  29321. }
  29322. operator VkVertexInputBindingDescription const&() const VULKAN_HPP_NOEXCEPT
  29323. {
  29324. return *reinterpret_cast<const VkVertexInputBindingDescription*>( this );
  29325. }
  29326. operator VkVertexInputBindingDescription &() VULKAN_HPP_NOEXCEPT
  29327. {
  29328. return *reinterpret_cast<VkVertexInputBindingDescription*>( this );
  29329. }
  29330. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  29331. auto operator<=>( VertexInputBindingDescription const& ) const = default;
  29332. #else
  29333. bool operator==( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
  29334. {
  29335. return ( binding == rhs.binding )
  29336. && ( stride == rhs.stride )
  29337. && ( inputRate == rhs.inputRate );
  29338. }
  29339. bool operator!=( VertexInputBindingDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
  29340. {
  29341. return !operator==( rhs );
  29342. }
  29343. #endif
  29344. public:
  29345. uint32_t binding = {};
  29346. uint32_t stride = {};
  29347. VULKAN_HPP_NAMESPACE::VertexInputRate inputRate = VULKAN_HPP_NAMESPACE::VertexInputRate::eVertex;
  29348. };
  29349. static_assert( sizeof( VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ), "struct and wrapper have different size!" );
  29350. static_assert( std::is_standard_layout<VertexInputBindingDescription>::value, "struct wrapper is not a standard layout!" );
  29351. struct VertexInputAttributeDescription
  29352. {
  29353. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29354. VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription(uint32_t location_ = {}, uint32_t binding_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, uint32_t offset_ = {}) VULKAN_HPP_NOEXCEPT
  29355. : location( location_ ), binding( binding_ ), format( format_ ), offset( offset_ )
  29356. {}
  29357. VULKAN_HPP_CONSTEXPR VertexInputAttributeDescription( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29358. VertexInputAttributeDescription( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
  29359. : VertexInputAttributeDescription( *reinterpret_cast<VertexInputAttributeDescription const *>( &rhs ) )
  29360. {}
  29361. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29362. VULKAN_HPP_CONSTEXPR_14 VertexInputAttributeDescription & operator=( VertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29363. VertexInputAttributeDescription & operator=( VkVertexInputAttributeDescription const & rhs ) VULKAN_HPP_NOEXCEPT
  29364. {
  29365. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription const *>( &rhs );
  29366. return *this;
  29367. }
  29368. VertexInputAttributeDescription & setLocation( uint32_t location_ ) VULKAN_HPP_NOEXCEPT
  29369. {
  29370. location = location_;
  29371. return *this;
  29372. }
  29373. VertexInputAttributeDescription & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
  29374. {
  29375. binding = binding_;
  29376. return *this;
  29377. }
  29378. VertexInputAttributeDescription & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
  29379. {
  29380. format = format_;
  29381. return *this;
  29382. }
  29383. VertexInputAttributeDescription & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
  29384. {
  29385. offset = offset_;
  29386. return *this;
  29387. }
  29388. operator VkVertexInputAttributeDescription const&() const VULKAN_HPP_NOEXCEPT
  29389. {
  29390. return *reinterpret_cast<const VkVertexInputAttributeDescription*>( this );
  29391. }
  29392. operator VkVertexInputAttributeDescription &() VULKAN_HPP_NOEXCEPT
  29393. {
  29394. return *reinterpret_cast<VkVertexInputAttributeDescription*>( this );
  29395. }
  29396. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  29397. auto operator<=>( VertexInputAttributeDescription const& ) const = default;
  29398. #else
  29399. bool operator==( VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
  29400. {
  29401. return ( location == rhs.location )
  29402. && ( binding == rhs.binding )
  29403. && ( format == rhs.format )
  29404. && ( offset == rhs.offset );
  29405. }
  29406. bool operator!=( VertexInputAttributeDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
  29407. {
  29408. return !operator==( rhs );
  29409. }
  29410. #endif
  29411. public:
  29412. uint32_t location = {};
  29413. uint32_t binding = {};
  29414. VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  29415. uint32_t offset = {};
  29416. };
  29417. static_assert( sizeof( VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ), "struct and wrapper have different size!" );
  29418. static_assert( std::is_standard_layout<VertexInputAttributeDescription>::value, "struct wrapper is not a standard layout!" );
  29419. struct PipelineVertexInputStateCreateInfo
  29420. {
  29421. static const bool allowDuplicate = false;
  29422. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputStateCreateInfo;
  29423. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29424. VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ = {}, uint32_t vertexBindingDescriptionCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ = {}, uint32_t vertexAttributeDescriptionCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ = {}) VULKAN_HPP_NOEXCEPT
  29425. : flags( flags_ ), vertexBindingDescriptionCount( vertexBindingDescriptionCount_ ), pVertexBindingDescriptions( pVertexBindingDescriptions_ ), vertexAttributeDescriptionCount( vertexAttributeDescriptionCount_ ), pVertexAttributeDescriptions( pVertexAttributeDescriptions_ )
  29426. {}
  29427. VULKAN_HPP_CONSTEXPR PipelineVertexInputStateCreateInfo( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29428. PipelineVertexInputStateCreateInfo( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  29429. : PipelineVertexInputStateCreateInfo( *reinterpret_cast<PipelineVertexInputStateCreateInfo const *>( &rhs ) )
  29430. {}
  29431. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  29432. PipelineVertexInputStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const & vertexBindingDescriptions_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const & vertexAttributeDescriptions_ = {} )
  29433. : flags( flags_ ), vertexBindingDescriptionCount( static_cast<uint32_t>( vertexBindingDescriptions_.size() ) ), pVertexBindingDescriptions( vertexBindingDescriptions_.data() ), vertexAttributeDescriptionCount( static_cast<uint32_t>( vertexAttributeDescriptions_.size() ) ), pVertexAttributeDescriptions( vertexAttributeDescriptions_.data() )
  29434. {}
  29435. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  29436. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29437. VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputStateCreateInfo & operator=( PipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29438. PipelineVertexInputStateCreateInfo & operator=( VkPipelineVertexInputStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  29439. {
  29440. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo const *>( &rhs );
  29441. return *this;
  29442. }
  29443. PipelineVertexInputStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  29444. {
  29445. pNext = pNext_;
  29446. return *this;
  29447. }
  29448. PipelineVertexInputStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  29449. {
  29450. flags = flags_;
  29451. return *this;
  29452. }
  29453. PipelineVertexInputStateCreateInfo & setVertexBindingDescriptionCount( uint32_t vertexBindingDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
  29454. {
  29455. vertexBindingDescriptionCount = vertexBindingDescriptionCount_;
  29456. return *this;
  29457. }
  29458. PipelineVertexInputStateCreateInfo & setPVertexBindingDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
  29459. {
  29460. pVertexBindingDescriptions = pVertexBindingDescriptions_;
  29461. return *this;
  29462. }
  29463. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  29464. PipelineVertexInputStateCreateInfo & setVertexBindingDescriptions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription> const & vertexBindingDescriptions_ ) VULKAN_HPP_NOEXCEPT
  29465. {
  29466. vertexBindingDescriptionCount = static_cast<uint32_t>( vertexBindingDescriptions_.size() );
  29467. pVertexBindingDescriptions = vertexBindingDescriptions_.data();
  29468. return *this;
  29469. }
  29470. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  29471. PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptionCount( uint32_t vertexAttributeDescriptionCount_ ) VULKAN_HPP_NOEXCEPT
  29472. {
  29473. vertexAttributeDescriptionCount = vertexAttributeDescriptionCount_;
  29474. return *this;
  29475. }
  29476. PipelineVertexInputStateCreateInfo & setPVertexAttributeDescriptions( const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
  29477. {
  29478. pVertexAttributeDescriptions = pVertexAttributeDescriptions_;
  29479. return *this;
  29480. }
  29481. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  29482. PipelineVertexInputStateCreateInfo & setVertexAttributeDescriptions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription> const & vertexAttributeDescriptions_ ) VULKAN_HPP_NOEXCEPT
  29483. {
  29484. vertexAttributeDescriptionCount = static_cast<uint32_t>( vertexAttributeDescriptions_.size() );
  29485. pVertexAttributeDescriptions = vertexAttributeDescriptions_.data();
  29486. return *this;
  29487. }
  29488. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  29489. operator VkPipelineVertexInputStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  29490. {
  29491. return *reinterpret_cast<const VkPipelineVertexInputStateCreateInfo*>( this );
  29492. }
  29493. operator VkPipelineVertexInputStateCreateInfo &() VULKAN_HPP_NOEXCEPT
  29494. {
  29495. return *reinterpret_cast<VkPipelineVertexInputStateCreateInfo*>( this );
  29496. }
  29497. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  29498. auto operator<=>( PipelineVertexInputStateCreateInfo const& ) const = default;
  29499. #else
  29500. bool operator==( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  29501. {
  29502. return ( sType == rhs.sType )
  29503. && ( pNext == rhs.pNext )
  29504. && ( flags == rhs.flags )
  29505. && ( vertexBindingDescriptionCount == rhs.vertexBindingDescriptionCount )
  29506. && ( pVertexBindingDescriptions == rhs.pVertexBindingDescriptions )
  29507. && ( vertexAttributeDescriptionCount == rhs.vertexAttributeDescriptionCount )
  29508. && ( pVertexAttributeDescriptions == rhs.pVertexAttributeDescriptions );
  29509. }
  29510. bool operator!=( PipelineVertexInputStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  29511. {
  29512. return !operator==( rhs );
  29513. }
  29514. #endif
  29515. public:
  29516. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputStateCreateInfo;
  29517. const void* pNext = {};
  29518. VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags flags = {};
  29519. uint32_t vertexBindingDescriptionCount = {};
  29520. const VULKAN_HPP_NAMESPACE::VertexInputBindingDescription* pVertexBindingDescriptions = {};
  29521. uint32_t vertexAttributeDescriptionCount = {};
  29522. const VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription* pVertexAttributeDescriptions = {};
  29523. };
  29524. static_assert( sizeof( PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ), "struct and wrapper have different size!" );
  29525. static_assert( std::is_standard_layout<PipelineVertexInputStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
  29526. template <>
  29527. struct CppType<StructureType, StructureType::ePipelineVertexInputStateCreateInfo>
  29528. {
  29529. using Type = PipelineVertexInputStateCreateInfo;
  29530. };
  29531. struct PipelineInputAssemblyStateCreateInfo
  29532. {
  29533. static const bool allowDuplicate = false;
  29534. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInputAssemblyStateCreateInfo;
  29535. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29536. VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList, VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ = {}) VULKAN_HPP_NOEXCEPT
  29537. : flags( flags_ ), topology( topology_ ), primitiveRestartEnable( primitiveRestartEnable_ )
  29538. {}
  29539. VULKAN_HPP_CONSTEXPR PipelineInputAssemblyStateCreateInfo( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29540. PipelineInputAssemblyStateCreateInfo( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  29541. : PipelineInputAssemblyStateCreateInfo( *reinterpret_cast<PipelineInputAssemblyStateCreateInfo const *>( &rhs ) )
  29542. {}
  29543. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29544. VULKAN_HPP_CONSTEXPR_14 PipelineInputAssemblyStateCreateInfo & operator=( PipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29545. PipelineInputAssemblyStateCreateInfo & operator=( VkPipelineInputAssemblyStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  29546. {
  29547. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo const *>( &rhs );
  29548. return *this;
  29549. }
  29550. PipelineInputAssemblyStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  29551. {
  29552. pNext = pNext_;
  29553. return *this;
  29554. }
  29555. PipelineInputAssemblyStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  29556. {
  29557. flags = flags_;
  29558. return *this;
  29559. }
  29560. PipelineInputAssemblyStateCreateInfo & setTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology topology_ ) VULKAN_HPP_NOEXCEPT
  29561. {
  29562. topology = topology_;
  29563. return *this;
  29564. }
  29565. PipelineInputAssemblyStateCreateInfo & setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable_ ) VULKAN_HPP_NOEXCEPT
  29566. {
  29567. primitiveRestartEnable = primitiveRestartEnable_;
  29568. return *this;
  29569. }
  29570. operator VkPipelineInputAssemblyStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  29571. {
  29572. return *reinterpret_cast<const VkPipelineInputAssemblyStateCreateInfo*>( this );
  29573. }
  29574. operator VkPipelineInputAssemblyStateCreateInfo &() VULKAN_HPP_NOEXCEPT
  29575. {
  29576. return *reinterpret_cast<VkPipelineInputAssemblyStateCreateInfo*>( this );
  29577. }
  29578. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  29579. auto operator<=>( PipelineInputAssemblyStateCreateInfo const& ) const = default;
  29580. #else
  29581. bool operator==( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  29582. {
  29583. return ( sType == rhs.sType )
  29584. && ( pNext == rhs.pNext )
  29585. && ( flags == rhs.flags )
  29586. && ( topology == rhs.topology )
  29587. && ( primitiveRestartEnable == rhs.primitiveRestartEnable );
  29588. }
  29589. bool operator!=( PipelineInputAssemblyStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  29590. {
  29591. return !operator==( rhs );
  29592. }
  29593. #endif
  29594. public:
  29595. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInputAssemblyStateCreateInfo;
  29596. const void* pNext = {};
  29597. VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags flags = {};
  29598. VULKAN_HPP_NAMESPACE::PrimitiveTopology topology = VULKAN_HPP_NAMESPACE::PrimitiveTopology::ePointList;
  29599. VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable = {};
  29600. };
  29601. static_assert( sizeof( PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ), "struct and wrapper have different size!" );
  29602. static_assert( std::is_standard_layout<PipelineInputAssemblyStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
  29603. template <>
  29604. struct CppType<StructureType, StructureType::ePipelineInputAssemblyStateCreateInfo>
  29605. {
  29606. using Type = PipelineInputAssemblyStateCreateInfo;
  29607. };
  29608. struct PipelineTessellationStateCreateInfo
  29609. {
  29610. static const bool allowDuplicate = false;
  29611. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationStateCreateInfo;
  29612. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29613. VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ = {}, uint32_t patchControlPoints_ = {}) VULKAN_HPP_NOEXCEPT
  29614. : flags( flags_ ), patchControlPoints( patchControlPoints_ )
  29615. {}
  29616. VULKAN_HPP_CONSTEXPR PipelineTessellationStateCreateInfo( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29617. PipelineTessellationStateCreateInfo( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  29618. : PipelineTessellationStateCreateInfo( *reinterpret_cast<PipelineTessellationStateCreateInfo const *>( &rhs ) )
  29619. {}
  29620. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29621. VULKAN_HPP_CONSTEXPR_14 PipelineTessellationStateCreateInfo & operator=( PipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29622. PipelineTessellationStateCreateInfo & operator=( VkPipelineTessellationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  29623. {
  29624. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo const *>( &rhs );
  29625. return *this;
  29626. }
  29627. PipelineTessellationStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  29628. {
  29629. pNext = pNext_;
  29630. return *this;
  29631. }
  29632. PipelineTessellationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  29633. {
  29634. flags = flags_;
  29635. return *this;
  29636. }
  29637. PipelineTessellationStateCreateInfo & setPatchControlPoints( uint32_t patchControlPoints_ ) VULKAN_HPP_NOEXCEPT
  29638. {
  29639. patchControlPoints = patchControlPoints_;
  29640. return *this;
  29641. }
  29642. operator VkPipelineTessellationStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  29643. {
  29644. return *reinterpret_cast<const VkPipelineTessellationStateCreateInfo*>( this );
  29645. }
  29646. operator VkPipelineTessellationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
  29647. {
  29648. return *reinterpret_cast<VkPipelineTessellationStateCreateInfo*>( this );
  29649. }
  29650. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  29651. auto operator<=>( PipelineTessellationStateCreateInfo const& ) const = default;
  29652. #else
  29653. bool operator==( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  29654. {
  29655. return ( sType == rhs.sType )
  29656. && ( pNext == rhs.pNext )
  29657. && ( flags == rhs.flags )
  29658. && ( patchControlPoints == rhs.patchControlPoints );
  29659. }
  29660. bool operator!=( PipelineTessellationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  29661. {
  29662. return !operator==( rhs );
  29663. }
  29664. #endif
  29665. public:
  29666. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationStateCreateInfo;
  29667. const void* pNext = {};
  29668. VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags flags = {};
  29669. uint32_t patchControlPoints = {};
  29670. };
  29671. static_assert( sizeof( PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ), "struct and wrapper have different size!" );
  29672. static_assert( std::is_standard_layout<PipelineTessellationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
  29673. template <>
  29674. struct CppType<StructureType, StructureType::ePipelineTessellationStateCreateInfo>
  29675. {
  29676. using Type = PipelineTessellationStateCreateInfo;
  29677. };
  29678. struct PipelineViewportStateCreateInfo
  29679. {
  29680. static const bool allowDuplicate = false;
  29681. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportStateCreateInfo;
  29682. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29683. VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ = {}, uint32_t scissorCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ = {}) VULKAN_HPP_NOEXCEPT
  29684. : flags( flags_ ), viewportCount( viewportCount_ ), pViewports( pViewports_ ), scissorCount( scissorCount_ ), pScissors( pScissors_ )
  29685. {}
  29686. VULKAN_HPP_CONSTEXPR PipelineViewportStateCreateInfo( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29687. PipelineViewportStateCreateInfo( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  29688. : PipelineViewportStateCreateInfo( *reinterpret_cast<PipelineViewportStateCreateInfo const *>( &rhs ) )
  29689. {}
  29690. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  29691. PipelineViewportStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors_ = {} )
  29692. : flags( flags_ ), viewportCount( static_cast<uint32_t>( viewports_.size() ) ), pViewports( viewports_.data() ), scissorCount( static_cast<uint32_t>( scissors_.size() ) ), pScissors( scissors_.data() )
  29693. {}
  29694. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  29695. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29696. VULKAN_HPP_CONSTEXPR_14 PipelineViewportStateCreateInfo & operator=( PipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29697. PipelineViewportStateCreateInfo & operator=( VkPipelineViewportStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  29698. {
  29699. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo const *>( &rhs );
  29700. return *this;
  29701. }
  29702. PipelineViewportStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  29703. {
  29704. pNext = pNext_;
  29705. return *this;
  29706. }
  29707. PipelineViewportStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  29708. {
  29709. flags = flags_;
  29710. return *this;
  29711. }
  29712. PipelineViewportStateCreateInfo & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
  29713. {
  29714. viewportCount = viewportCount_;
  29715. return *this;
  29716. }
  29717. PipelineViewportStateCreateInfo & setPViewports( const VULKAN_HPP_NAMESPACE::Viewport* pViewports_ ) VULKAN_HPP_NOEXCEPT
  29718. {
  29719. pViewports = pViewports_;
  29720. return *this;
  29721. }
  29722. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  29723. PipelineViewportStateCreateInfo & setViewports( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports_ ) VULKAN_HPP_NOEXCEPT
  29724. {
  29725. viewportCount = static_cast<uint32_t>( viewports_.size() );
  29726. pViewports = viewports_.data();
  29727. return *this;
  29728. }
  29729. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  29730. PipelineViewportStateCreateInfo & setScissorCount( uint32_t scissorCount_ ) VULKAN_HPP_NOEXCEPT
  29731. {
  29732. scissorCount = scissorCount_;
  29733. return *this;
  29734. }
  29735. PipelineViewportStateCreateInfo & setPScissors( const VULKAN_HPP_NAMESPACE::Rect2D* pScissors_ ) VULKAN_HPP_NOEXCEPT
  29736. {
  29737. pScissors = pScissors_;
  29738. return *this;
  29739. }
  29740. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  29741. PipelineViewportStateCreateInfo & setScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors_ ) VULKAN_HPP_NOEXCEPT
  29742. {
  29743. scissorCount = static_cast<uint32_t>( scissors_.size() );
  29744. pScissors = scissors_.data();
  29745. return *this;
  29746. }
  29747. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  29748. operator VkPipelineViewportStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  29749. {
  29750. return *reinterpret_cast<const VkPipelineViewportStateCreateInfo*>( this );
  29751. }
  29752. operator VkPipelineViewportStateCreateInfo &() VULKAN_HPP_NOEXCEPT
  29753. {
  29754. return *reinterpret_cast<VkPipelineViewportStateCreateInfo*>( this );
  29755. }
  29756. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  29757. auto operator<=>( PipelineViewportStateCreateInfo const& ) const = default;
  29758. #else
  29759. bool operator==( PipelineViewportStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  29760. {
  29761. return ( sType == rhs.sType )
  29762. && ( pNext == rhs.pNext )
  29763. && ( flags == rhs.flags )
  29764. && ( viewportCount == rhs.viewportCount )
  29765. && ( pViewports == rhs.pViewports )
  29766. && ( scissorCount == rhs.scissorCount )
  29767. && ( pScissors == rhs.pScissors );
  29768. }
  29769. bool operator!=( PipelineViewportStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  29770. {
  29771. return !operator==( rhs );
  29772. }
  29773. #endif
  29774. public:
  29775. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportStateCreateInfo;
  29776. const void* pNext = {};
  29777. VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags flags = {};
  29778. uint32_t viewportCount = {};
  29779. const VULKAN_HPP_NAMESPACE::Viewport* pViewports = {};
  29780. uint32_t scissorCount = {};
  29781. const VULKAN_HPP_NAMESPACE::Rect2D* pScissors = {};
  29782. };
  29783. static_assert( sizeof( PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ), "struct and wrapper have different size!" );
  29784. static_assert( std::is_standard_layout<PipelineViewportStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
  29785. template <>
  29786. struct CppType<StructureType, StructureType::ePipelineViewportStateCreateInfo>
  29787. {
  29788. using Type = PipelineViewportStateCreateInfo;
  29789. };
  29790. struct PipelineRasterizationStateCreateInfo
  29791. {
  29792. static const bool allowDuplicate = false;
  29793. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateCreateInfo;
  29794. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29795. VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ = {}, VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ = VULKAN_HPP_NAMESPACE::PolygonMode::eFill, VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ = {}, VULKAN_HPP_NAMESPACE::FrontFace frontFace_ = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise, VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ = {}, float depthBiasConstantFactor_ = {}, float depthBiasClamp_ = {}, float depthBiasSlopeFactor_ = {}, float lineWidth_ = {}) VULKAN_HPP_NOEXCEPT
  29796. : flags( flags_ ), depthClampEnable( depthClampEnable_ ), rasterizerDiscardEnable( rasterizerDiscardEnable_ ), polygonMode( polygonMode_ ), cullMode( cullMode_ ), frontFace( frontFace_ ), depthBiasEnable( depthBiasEnable_ ), depthBiasConstantFactor( depthBiasConstantFactor_ ), depthBiasClamp( depthBiasClamp_ ), depthBiasSlopeFactor( depthBiasSlopeFactor_ ), lineWidth( lineWidth_ )
  29797. {}
  29798. VULKAN_HPP_CONSTEXPR PipelineRasterizationStateCreateInfo( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29799. PipelineRasterizationStateCreateInfo( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  29800. : PipelineRasterizationStateCreateInfo( *reinterpret_cast<PipelineRasterizationStateCreateInfo const *>( &rhs ) )
  29801. {}
  29802. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29803. VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateCreateInfo & operator=( PipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29804. PipelineRasterizationStateCreateInfo & operator=( VkPipelineRasterizationStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  29805. {
  29806. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo const *>( &rhs );
  29807. return *this;
  29808. }
  29809. PipelineRasterizationStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  29810. {
  29811. pNext = pNext_;
  29812. return *this;
  29813. }
  29814. PipelineRasterizationStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  29815. {
  29816. flags = flags_;
  29817. return *this;
  29818. }
  29819. PipelineRasterizationStateCreateInfo & setDepthClampEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable_ ) VULKAN_HPP_NOEXCEPT
  29820. {
  29821. depthClampEnable = depthClampEnable_;
  29822. return *this;
  29823. }
  29824. PipelineRasterizationStateCreateInfo & setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable_ ) VULKAN_HPP_NOEXCEPT
  29825. {
  29826. rasterizerDiscardEnable = rasterizerDiscardEnable_;
  29827. return *this;
  29828. }
  29829. PipelineRasterizationStateCreateInfo & setPolygonMode( VULKAN_HPP_NAMESPACE::PolygonMode polygonMode_ ) VULKAN_HPP_NOEXCEPT
  29830. {
  29831. polygonMode = polygonMode_;
  29832. return *this;
  29833. }
  29834. PipelineRasterizationStateCreateInfo & setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode_ ) VULKAN_HPP_NOEXCEPT
  29835. {
  29836. cullMode = cullMode_;
  29837. return *this;
  29838. }
  29839. PipelineRasterizationStateCreateInfo & setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace_ ) VULKAN_HPP_NOEXCEPT
  29840. {
  29841. frontFace = frontFace_;
  29842. return *this;
  29843. }
  29844. PipelineRasterizationStateCreateInfo & setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable_ ) VULKAN_HPP_NOEXCEPT
  29845. {
  29846. depthBiasEnable = depthBiasEnable_;
  29847. return *this;
  29848. }
  29849. PipelineRasterizationStateCreateInfo & setDepthBiasConstantFactor( float depthBiasConstantFactor_ ) VULKAN_HPP_NOEXCEPT
  29850. {
  29851. depthBiasConstantFactor = depthBiasConstantFactor_;
  29852. return *this;
  29853. }
  29854. PipelineRasterizationStateCreateInfo & setDepthBiasClamp( float depthBiasClamp_ ) VULKAN_HPP_NOEXCEPT
  29855. {
  29856. depthBiasClamp = depthBiasClamp_;
  29857. return *this;
  29858. }
  29859. PipelineRasterizationStateCreateInfo & setDepthBiasSlopeFactor( float depthBiasSlopeFactor_ ) VULKAN_HPP_NOEXCEPT
  29860. {
  29861. depthBiasSlopeFactor = depthBiasSlopeFactor_;
  29862. return *this;
  29863. }
  29864. PipelineRasterizationStateCreateInfo & setLineWidth( float lineWidth_ ) VULKAN_HPP_NOEXCEPT
  29865. {
  29866. lineWidth = lineWidth_;
  29867. return *this;
  29868. }
  29869. operator VkPipelineRasterizationStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  29870. {
  29871. return *reinterpret_cast<const VkPipelineRasterizationStateCreateInfo*>( this );
  29872. }
  29873. operator VkPipelineRasterizationStateCreateInfo &() VULKAN_HPP_NOEXCEPT
  29874. {
  29875. return *reinterpret_cast<VkPipelineRasterizationStateCreateInfo*>( this );
  29876. }
  29877. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  29878. auto operator<=>( PipelineRasterizationStateCreateInfo const& ) const = default;
  29879. #else
  29880. bool operator==( PipelineRasterizationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  29881. {
  29882. return ( sType == rhs.sType )
  29883. && ( pNext == rhs.pNext )
  29884. && ( flags == rhs.flags )
  29885. && ( depthClampEnable == rhs.depthClampEnable )
  29886. && ( rasterizerDiscardEnable == rhs.rasterizerDiscardEnable )
  29887. && ( polygonMode == rhs.polygonMode )
  29888. && ( cullMode == rhs.cullMode )
  29889. && ( frontFace == rhs.frontFace )
  29890. && ( depthBiasEnable == rhs.depthBiasEnable )
  29891. && ( depthBiasConstantFactor == rhs.depthBiasConstantFactor )
  29892. && ( depthBiasClamp == rhs.depthBiasClamp )
  29893. && ( depthBiasSlopeFactor == rhs.depthBiasSlopeFactor )
  29894. && ( lineWidth == rhs.lineWidth );
  29895. }
  29896. bool operator!=( PipelineRasterizationStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  29897. {
  29898. return !operator==( rhs );
  29899. }
  29900. #endif
  29901. public:
  29902. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateCreateInfo;
  29903. const void* pNext = {};
  29904. VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags flags = {};
  29905. VULKAN_HPP_NAMESPACE::Bool32 depthClampEnable = {};
  29906. VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable = {};
  29907. VULKAN_HPP_NAMESPACE::PolygonMode polygonMode = VULKAN_HPP_NAMESPACE::PolygonMode::eFill;
  29908. VULKAN_HPP_NAMESPACE::CullModeFlags cullMode = {};
  29909. VULKAN_HPP_NAMESPACE::FrontFace frontFace = VULKAN_HPP_NAMESPACE::FrontFace::eCounterClockwise;
  29910. VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable = {};
  29911. float depthBiasConstantFactor = {};
  29912. float depthBiasClamp = {};
  29913. float depthBiasSlopeFactor = {};
  29914. float lineWidth = {};
  29915. };
  29916. static_assert( sizeof( PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ), "struct and wrapper have different size!" );
  29917. static_assert( std::is_standard_layout<PipelineRasterizationStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
  29918. template <>
  29919. struct CppType<StructureType, StructureType::ePipelineRasterizationStateCreateInfo>
  29920. {
  29921. using Type = PipelineRasterizationStateCreateInfo;
  29922. };
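// --- Illustrative usage sketch (not part of the generated header) ---
// A minimal rasterization-state setup using the fluent setters above, assuming the default
// `vk` namespace alias for VULKAN_HPP_NAMESPACE; polygonMode already defaults to eFill:
//
//   vk::PipelineRasterizationStateCreateInfo rasterizationState;
//   rasterizationState.setFrontFace( vk::FrontFace::eClockwise )
//                     .setDepthBiasEnable( VK_FALSE )
//                     .setLineWidth( 1.0f );   // must stay 1.0 unless the wideLines feature is enabled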
  29923. struct PipelineMultisampleStateCreateInfo
  29924. {
  29925. static const bool allowDuplicate = false;
  29926. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineMultisampleStateCreateInfo;
  29927. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29928. VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ = {}, float minSampleShading_ = {}, const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ = {}) VULKAN_HPP_NOEXCEPT
  29929. : flags( flags_ ), rasterizationSamples( rasterizationSamples_ ), sampleShadingEnable( sampleShadingEnable_ ), minSampleShading( minSampleShading_ ), pSampleMask( pSampleMask_ ), alphaToCoverageEnable( alphaToCoverageEnable_ ), alphaToOneEnable( alphaToOneEnable_ )
  29930. {}
  29931. VULKAN_HPP_CONSTEXPR PipelineMultisampleStateCreateInfo( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29932. PipelineMultisampleStateCreateInfo( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  29933. : PipelineMultisampleStateCreateInfo( *reinterpret_cast<PipelineMultisampleStateCreateInfo const *>( &rhs ) )
  29934. {}
  29935. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  29936. VULKAN_HPP_CONSTEXPR_14 PipelineMultisampleStateCreateInfo & operator=( PipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  29937. PipelineMultisampleStateCreateInfo & operator=( VkPipelineMultisampleStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  29938. {
  29939. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo const *>( &rhs );
  29940. return *this;
  29941. }
  29942. PipelineMultisampleStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  29943. {
  29944. pNext = pNext_;
  29945. return *this;
  29946. }
  29947. PipelineMultisampleStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  29948. {
  29949. flags = flags_;
  29950. return *this;
  29951. }
  29952. PipelineMultisampleStateCreateInfo & setRasterizationSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ ) VULKAN_HPP_NOEXCEPT
  29953. {
  29954. rasterizationSamples = rasterizationSamples_;
  29955. return *this;
  29956. }
  29957. PipelineMultisampleStateCreateInfo & setSampleShadingEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable_ ) VULKAN_HPP_NOEXCEPT
  29958. {
  29959. sampleShadingEnable = sampleShadingEnable_;
  29960. return *this;
  29961. }
  29962. PipelineMultisampleStateCreateInfo & setMinSampleShading( float minSampleShading_ ) VULKAN_HPP_NOEXCEPT
  29963. {
  29964. minSampleShading = minSampleShading_;
  29965. return *this;
  29966. }
  29967. PipelineMultisampleStateCreateInfo & setPSampleMask( const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask_ ) VULKAN_HPP_NOEXCEPT
  29968. {
  29969. pSampleMask = pSampleMask_;
  29970. return *this;
  29971. }
  29972. PipelineMultisampleStateCreateInfo & setAlphaToCoverageEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable_ ) VULKAN_HPP_NOEXCEPT
  29973. {
  29974. alphaToCoverageEnable = alphaToCoverageEnable_;
  29975. return *this;
  29976. }
  29977. PipelineMultisampleStateCreateInfo & setAlphaToOneEnable( VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable_ ) VULKAN_HPP_NOEXCEPT
  29978. {
  29979. alphaToOneEnable = alphaToOneEnable_;
  29980. return *this;
  29981. }
  29982. operator VkPipelineMultisampleStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  29983. {
  29984. return *reinterpret_cast<const VkPipelineMultisampleStateCreateInfo*>( this );
  29985. }
  29986. operator VkPipelineMultisampleStateCreateInfo &() VULKAN_HPP_NOEXCEPT
  29987. {
  29988. return *reinterpret_cast<VkPipelineMultisampleStateCreateInfo*>( this );
  29989. }
  29990. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  29991. auto operator<=>( PipelineMultisampleStateCreateInfo const& ) const = default;
  29992. #else
  29993. bool operator==( PipelineMultisampleStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  29994. {
  29995. return ( sType == rhs.sType )
  29996. && ( pNext == rhs.pNext )
  29997. && ( flags == rhs.flags )
  29998. && ( rasterizationSamples == rhs.rasterizationSamples )
  29999. && ( sampleShadingEnable == rhs.sampleShadingEnable )
  30000. && ( minSampleShading == rhs.minSampleShading )
  30001. && ( pSampleMask == rhs.pSampleMask )
  30002. && ( alphaToCoverageEnable == rhs.alphaToCoverageEnable )
  30003. && ( alphaToOneEnable == rhs.alphaToOneEnable );
  30004. }
  30005. bool operator!=( PipelineMultisampleStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  30006. {
  30007. return !operator==( rhs );
  30008. }
  30009. #endif
  30010. public:
  30011. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineMultisampleStateCreateInfo;
  30012. const void* pNext = {};
  30013. VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags flags = {};
  30014. VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
  30015. VULKAN_HPP_NAMESPACE::Bool32 sampleShadingEnable = {};
  30016. float minSampleShading = {};
  30017. const VULKAN_HPP_NAMESPACE::SampleMask* pSampleMask = {};
  30018. VULKAN_HPP_NAMESPACE::Bool32 alphaToCoverageEnable = {};
  30019. VULKAN_HPP_NAMESPACE::Bool32 alphaToOneEnable = {};
  30020. };
  30021. static_assert( sizeof( PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ), "struct and wrapper have different size!" );
  30022. static_assert( std::is_standard_layout<PipelineMultisampleStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
  30023. template <>
  30024. struct CppType<StructureType, StructureType::ePipelineMultisampleStateCreateInfo>
  30025. {
  30026. using Type = PipelineMultisampleStateCreateInfo;
  30027. };
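// --- Illustrative usage sketch (not part of the generated header) ---
// Default construction already selects single-sample rendering (rasterizationSamples = e1);
// enabling 4x MSAA could look like this, again assuming the default `vk` namespace alias:
//
//   vk::PipelineMultisampleStateCreateInfo multisampleState;
//   multisampleState.setRasterizationSamples( vk::SampleCountFlagBits::e4 )
//                   .setSampleShadingEnable( VK_FALSE );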
  30028. struct StencilOpState
  30029. {
  30030. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  30031. VULKAN_HPP_CONSTEXPR StencilOpState(VULKAN_HPP_NAMESPACE::StencilOp failOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::StencilOp passOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ = VULKAN_HPP_NAMESPACE::StencilOp::eKeep, VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, uint32_t compareMask_ = {}, uint32_t writeMask_ = {}, uint32_t reference_ = {}) VULKAN_HPP_NOEXCEPT
  30032. : failOp( failOp_ ), passOp( passOp_ ), depthFailOp( depthFailOp_ ), compareOp( compareOp_ ), compareMask( compareMask_ ), writeMask( writeMask_ ), reference( reference_ )
  30033. {}
  30034. VULKAN_HPP_CONSTEXPR StencilOpState( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  30035. StencilOpState( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
  30036. : StencilOpState( *reinterpret_cast<StencilOpState const *>( &rhs ) )
  30037. {}
  30038. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  30039. VULKAN_HPP_CONSTEXPR_14 StencilOpState & operator=( StencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  30040. StencilOpState & operator=( VkStencilOpState const & rhs ) VULKAN_HPP_NOEXCEPT
  30041. {
  30042. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StencilOpState const *>( &rhs );
  30043. return *this;
  30044. }
  30045. StencilOpState & setFailOp( VULKAN_HPP_NAMESPACE::StencilOp failOp_ ) VULKAN_HPP_NOEXCEPT
  30046. {
  30047. failOp = failOp_;
  30048. return *this;
  30049. }
  30050. StencilOpState & setPassOp( VULKAN_HPP_NAMESPACE::StencilOp passOp_ ) VULKAN_HPP_NOEXCEPT
  30051. {
  30052. passOp = passOp_;
  30053. return *this;
  30054. }
  30055. StencilOpState & setDepthFailOp( VULKAN_HPP_NAMESPACE::StencilOp depthFailOp_ ) VULKAN_HPP_NOEXCEPT
  30056. {
  30057. depthFailOp = depthFailOp_;
  30058. return *this;
  30059. }
  30060. StencilOpState & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
  30061. {
  30062. compareOp = compareOp_;
  30063. return *this;
  30064. }
  30065. StencilOpState & setCompareMask( uint32_t compareMask_ ) VULKAN_HPP_NOEXCEPT
  30066. {
  30067. compareMask = compareMask_;
  30068. return *this;
  30069. }
  30070. StencilOpState & setWriteMask( uint32_t writeMask_ ) VULKAN_HPP_NOEXCEPT
  30071. {
  30072. writeMask = writeMask_;
  30073. return *this;
  30074. }
  30075. StencilOpState & setReference( uint32_t reference_ ) VULKAN_HPP_NOEXCEPT
  30076. {
  30077. reference = reference_;
  30078. return *this;
  30079. }
  30080. operator VkStencilOpState const&() const VULKAN_HPP_NOEXCEPT
  30081. {
  30082. return *reinterpret_cast<const VkStencilOpState*>( this );
  30083. }
  30084. operator VkStencilOpState &() VULKAN_HPP_NOEXCEPT
  30085. {
  30086. return *reinterpret_cast<VkStencilOpState*>( this );
  30087. }
  30088. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  30089. auto operator<=>( StencilOpState const& ) const = default;
  30090. #else
  30091. bool operator==( StencilOpState const& rhs ) const VULKAN_HPP_NOEXCEPT
  30092. {
  30093. return ( failOp == rhs.failOp )
  30094. && ( passOp == rhs.passOp )
  30095. && ( depthFailOp == rhs.depthFailOp )
  30096. && ( compareOp == rhs.compareOp )
  30097. && ( compareMask == rhs.compareMask )
  30098. && ( writeMask == rhs.writeMask )
  30099. && ( reference == rhs.reference );
  30100. }
  30101. bool operator!=( StencilOpState const& rhs ) const VULKAN_HPP_NOEXCEPT
  30102. {
  30103. return !operator==( rhs );
  30104. }
  30105. #endif
  30106. public:
  30107. VULKAN_HPP_NAMESPACE::StencilOp failOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
  30108. VULKAN_HPP_NAMESPACE::StencilOp passOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
  30109. VULKAN_HPP_NAMESPACE::StencilOp depthFailOp = VULKAN_HPP_NAMESPACE::StencilOp::eKeep;
  30110. VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
  30111. uint32_t compareMask = {};
  30112. uint32_t writeMask = {};
  30113. uint32_t reference = {};
  30114. };
  30115. static_assert( sizeof( StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
  30116. static_assert( std::is_standard_layout<StencilOpState>::value, "struct wrapper is not a standard layout!" );
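// --- Illustrative usage sketch (not part of the generated header) ---
// StencilOpState is a plain sub-structure (no sType/pNext), so it is filled directly via the
// constructor defaults or the fluent setters, e.g. an "always pass, keep everything" state:
//
//   vk::StencilOpState stencilState;                      // all ops default to eKeep, compareOp to eNever
//   stencilState.setCompareOp( vk::CompareOp::eAlways )
//               .setCompareMask( 0xFF )
//               .setWriteMask( 0xFF );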
  30117. struct PipelineDepthStencilStateCreateInfo
  30118. {
  30119. static const bool allowDuplicate = false;
  30120. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDepthStencilStateCreateInfo;
  30121. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  30122. VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ = {}, VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ = {}, VULKAN_HPP_NAMESPACE::StencilOpState front_ = {}, VULKAN_HPP_NAMESPACE::StencilOpState back_ = {}, float minDepthBounds_ = {}, float maxDepthBounds_ = {}) VULKAN_HPP_NOEXCEPT
  30123. : flags( flags_ ), depthTestEnable( depthTestEnable_ ), depthWriteEnable( depthWriteEnable_ ), depthCompareOp( depthCompareOp_ ), depthBoundsTestEnable( depthBoundsTestEnable_ ), stencilTestEnable( stencilTestEnable_ ), front( front_ ), back( back_ ), minDepthBounds( minDepthBounds_ ), maxDepthBounds( maxDepthBounds_ )
  30124. {}
  30125. VULKAN_HPP_CONSTEXPR PipelineDepthStencilStateCreateInfo( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  30126. PipelineDepthStencilStateCreateInfo( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  30127. : PipelineDepthStencilStateCreateInfo( *reinterpret_cast<PipelineDepthStencilStateCreateInfo const *>( &rhs ) )
  30128. {}
  30129. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  30130. VULKAN_HPP_CONSTEXPR_14 PipelineDepthStencilStateCreateInfo & operator=( PipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  30131. PipelineDepthStencilStateCreateInfo & operator=( VkPipelineDepthStencilStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  30132. {
  30133. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo const *>( &rhs );
  30134. return *this;
  30135. }
  30136. PipelineDepthStencilStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  30137. {
  30138. pNext = pNext_;
  30139. return *this;
  30140. }
  30141. PipelineDepthStencilStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  30142. {
  30143. flags = flags_;
  30144. return *this;
  30145. }
  30146. PipelineDepthStencilStateCreateInfo & setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable_ ) VULKAN_HPP_NOEXCEPT
  30147. {
  30148. depthTestEnable = depthTestEnable_;
  30149. return *this;
  30150. }
  30151. PipelineDepthStencilStateCreateInfo & setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable_ ) VULKAN_HPP_NOEXCEPT
  30152. {
  30153. depthWriteEnable = depthWriteEnable_;
  30154. return *this;
  30155. }
  30156. PipelineDepthStencilStateCreateInfo & setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp_ ) VULKAN_HPP_NOEXCEPT
  30157. {
  30158. depthCompareOp = depthCompareOp_;
  30159. return *this;
  30160. }
  30161. PipelineDepthStencilStateCreateInfo & setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable_ ) VULKAN_HPP_NOEXCEPT
  30162. {
  30163. depthBoundsTestEnable = depthBoundsTestEnable_;
  30164. return *this;
  30165. }
  30166. PipelineDepthStencilStateCreateInfo & setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable_ ) VULKAN_HPP_NOEXCEPT
  30167. {
  30168. stencilTestEnable = stencilTestEnable_;
  30169. return *this;
  30170. }
  30171. PipelineDepthStencilStateCreateInfo & setFront( VULKAN_HPP_NAMESPACE::StencilOpState const & front_ ) VULKAN_HPP_NOEXCEPT
  30172. {
  30173. front = front_;
  30174. return *this;
  30175. }
  30176. PipelineDepthStencilStateCreateInfo & setBack( VULKAN_HPP_NAMESPACE::StencilOpState const & back_ ) VULKAN_HPP_NOEXCEPT
  30177. {
  30178. back = back_;
  30179. return *this;
  30180. }
  30181. PipelineDepthStencilStateCreateInfo & setMinDepthBounds( float minDepthBounds_ ) VULKAN_HPP_NOEXCEPT
  30182. {
  30183. minDepthBounds = minDepthBounds_;
  30184. return *this;
  30185. }
  30186. PipelineDepthStencilStateCreateInfo & setMaxDepthBounds( float maxDepthBounds_ ) VULKAN_HPP_NOEXCEPT
  30187. {
  30188. maxDepthBounds = maxDepthBounds_;
  30189. return *this;
  30190. }
  30191. operator VkPipelineDepthStencilStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  30192. {
  30193. return *reinterpret_cast<const VkPipelineDepthStencilStateCreateInfo*>( this );
  30194. }
  30195. operator VkPipelineDepthStencilStateCreateInfo &() VULKAN_HPP_NOEXCEPT
  30196. {
  30197. return *reinterpret_cast<VkPipelineDepthStencilStateCreateInfo*>( this );
  30198. }
  30199. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  30200. auto operator<=>( PipelineDepthStencilStateCreateInfo const& ) const = default;
  30201. #else
  30202. bool operator==( PipelineDepthStencilStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  30203. {
  30204. return ( sType == rhs.sType )
  30205. && ( pNext == rhs.pNext )
  30206. && ( flags == rhs.flags )
  30207. && ( depthTestEnable == rhs.depthTestEnable )
  30208. && ( depthWriteEnable == rhs.depthWriteEnable )
  30209. && ( depthCompareOp == rhs.depthCompareOp )
  30210. && ( depthBoundsTestEnable == rhs.depthBoundsTestEnable )
  30211. && ( stencilTestEnable == rhs.stencilTestEnable )
  30212. && ( front == rhs.front )
  30213. && ( back == rhs.back )
  30214. && ( minDepthBounds == rhs.minDepthBounds )
  30215. && ( maxDepthBounds == rhs.maxDepthBounds );
  30216. }
  30217. bool operator!=( PipelineDepthStencilStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  30218. {
  30219. return !operator==( rhs );
  30220. }
  30221. #endif
  30222. public:
  30223. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDepthStencilStateCreateInfo;
  30224. const void* pNext = {};
  30225. VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags flags = {};
  30226. VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable = {};
  30227. VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable = {};
  30228. VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
  30229. VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable = {};
  30230. VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable = {};
  30231. VULKAN_HPP_NAMESPACE::StencilOpState front = {};
  30232. VULKAN_HPP_NAMESPACE::StencilOpState back = {};
  30233. float minDepthBounds = {};
  30234. float maxDepthBounds = {};
  30235. };
  30236. static_assert( sizeof( PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ), "struct and wrapper have different size!" );
  30237. static_assert( std::is_standard_layout<PipelineDepthStencilStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
  30238. template <>
  30239. struct CppType<StructureType, StructureType::ePipelineDepthStencilStateCreateInfo>
  30240. {
  30241. using Type = PipelineDepthStencilStateCreateInfo;
  30242. };
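// --- Illustrative usage sketch (not part of the generated header) ---
// A typical depth-test-only configuration, assuming the default `vk` namespace alias:
//
//   vk::PipelineDepthStencilStateCreateInfo depthStencilState;
//   depthStencilState.setDepthTestEnable( VK_TRUE )
//                    .setDepthWriteEnable( VK_TRUE )
//                    .setDepthCompareOp( vk::CompareOp::eLess )
//                    .setStencilTestEnable( VK_FALSE );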
  30243. struct PipelineColorBlendAttachmentState
  30244. {
  30245. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  30246. VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState(VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ = {}, VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ = VULKAN_HPP_NAMESPACE::BlendFactor::eZero, VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ = VULKAN_HPP_NAMESPACE::BlendOp::eAdd, VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ = {}) VULKAN_HPP_NOEXCEPT
  30247. : blendEnable( blendEnable_ ), srcColorBlendFactor( srcColorBlendFactor_ ), dstColorBlendFactor( dstColorBlendFactor_ ), colorBlendOp( colorBlendOp_ ), srcAlphaBlendFactor( srcAlphaBlendFactor_ ), dstAlphaBlendFactor( dstAlphaBlendFactor_ ), alphaBlendOp( alphaBlendOp_ ), colorWriteMask( colorWriteMask_ )
  30248. {}
  30249. VULKAN_HPP_CONSTEXPR PipelineColorBlendAttachmentState( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  30250. PipelineColorBlendAttachmentState( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
  30251. : PipelineColorBlendAttachmentState( *reinterpret_cast<PipelineColorBlendAttachmentState const *>( &rhs ) )
  30252. {}
  30253. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  30254. VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAttachmentState & operator=( PipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  30255. PipelineColorBlendAttachmentState & operator=( VkPipelineColorBlendAttachmentState const & rhs ) VULKAN_HPP_NOEXCEPT
  30256. {
  30257. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState const *>( &rhs );
  30258. return *this;
  30259. }
  30260. PipelineColorBlendAttachmentState & setBlendEnable( VULKAN_HPP_NAMESPACE::Bool32 blendEnable_ ) VULKAN_HPP_NOEXCEPT
  30261. {
  30262. blendEnable = blendEnable_;
  30263. return *this;
  30264. }
  30265. PipelineColorBlendAttachmentState & setSrcColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
  30266. {
  30267. srcColorBlendFactor = srcColorBlendFactor_;
  30268. return *this;
  30269. }
  30270. PipelineColorBlendAttachmentState & setDstColorBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor_ ) VULKAN_HPP_NOEXCEPT
  30271. {
  30272. dstColorBlendFactor = dstColorBlendFactor_;
  30273. return *this;
  30274. }
  30275. PipelineColorBlendAttachmentState & setColorBlendOp( VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp_ ) VULKAN_HPP_NOEXCEPT
  30276. {
  30277. colorBlendOp = colorBlendOp_;
  30278. return *this;
  30279. }
  30280. PipelineColorBlendAttachmentState & setSrcAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
  30281. {
  30282. srcAlphaBlendFactor = srcAlphaBlendFactor_;
  30283. return *this;
  30284. }
  30285. PipelineColorBlendAttachmentState & setDstAlphaBlendFactor( VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor_ ) VULKAN_HPP_NOEXCEPT
  30286. {
  30287. dstAlphaBlendFactor = dstAlphaBlendFactor_;
  30288. return *this;
  30289. }
  30290. PipelineColorBlendAttachmentState & setAlphaBlendOp( VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp_ ) VULKAN_HPP_NOEXCEPT
  30291. {
  30292. alphaBlendOp = alphaBlendOp_;
  30293. return *this;
  30294. }
  30295. PipelineColorBlendAttachmentState & setColorWriteMask( VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask_ ) VULKAN_HPP_NOEXCEPT
  30296. {
  30297. colorWriteMask = colorWriteMask_;
  30298. return *this;
  30299. }
  30300. operator VkPipelineColorBlendAttachmentState const&() const VULKAN_HPP_NOEXCEPT
  30301. {
  30302. return *reinterpret_cast<const VkPipelineColorBlendAttachmentState*>( this );
  30303. }
  30304. operator VkPipelineColorBlendAttachmentState &() VULKAN_HPP_NOEXCEPT
  30305. {
  30306. return *reinterpret_cast<VkPipelineColorBlendAttachmentState*>( this );
  30307. }
  30308. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  30309. auto operator<=>( PipelineColorBlendAttachmentState const& ) const = default;
  30310. #else
  30311. bool operator==( PipelineColorBlendAttachmentState const& rhs ) const VULKAN_HPP_NOEXCEPT
  30312. {
  30313. return ( blendEnable == rhs.blendEnable )
  30314. && ( srcColorBlendFactor == rhs.srcColorBlendFactor )
  30315. && ( dstColorBlendFactor == rhs.dstColorBlendFactor )
  30316. && ( colorBlendOp == rhs.colorBlendOp )
  30317. && ( srcAlphaBlendFactor == rhs.srcAlphaBlendFactor )
  30318. && ( dstAlphaBlendFactor == rhs.dstAlphaBlendFactor )
  30319. && ( alphaBlendOp == rhs.alphaBlendOp )
  30320. && ( colorWriteMask == rhs.colorWriteMask );
  30321. }
  30322. bool operator!=( PipelineColorBlendAttachmentState const& rhs ) const VULKAN_HPP_NOEXCEPT
  30323. {
  30324. return !operator==( rhs );
  30325. }
  30326. #endif
  30327. public:
  30328. VULKAN_HPP_NAMESPACE::Bool32 blendEnable = {};
  30329. VULKAN_HPP_NAMESPACE::BlendFactor srcColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
  30330. VULKAN_HPP_NAMESPACE::BlendFactor dstColorBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
  30331. VULKAN_HPP_NAMESPACE::BlendOp colorBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
  30332. VULKAN_HPP_NAMESPACE::BlendFactor srcAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
  30333. VULKAN_HPP_NAMESPACE::BlendFactor dstAlphaBlendFactor = VULKAN_HPP_NAMESPACE::BlendFactor::eZero;
  30334. VULKAN_HPP_NAMESPACE::BlendOp alphaBlendOp = VULKAN_HPP_NAMESPACE::BlendOp::eAdd;
  30335. VULKAN_HPP_NAMESPACE::ColorComponentFlags colorWriteMask = {};
  30336. };
  30337. static_assert( sizeof( PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ), "struct and wrapper have different size!" );
  30338. static_assert( std::is_standard_layout<PipelineColorBlendAttachmentState>::value, "struct wrapper is not a standard layout!" );
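// --- Illustrative usage sketch (not part of the generated header) ---
// Standard alpha blending for one color attachment; note that colorWriteMask defaults to 0,
// so it normally has to be set explicitly for anything to be written:
//
//   vk::PipelineColorBlendAttachmentState blendAttachment;
//   blendAttachment.setBlendEnable( VK_TRUE )
//                  .setSrcColorBlendFactor( vk::BlendFactor::eSrcAlpha )
//                  .setDstColorBlendFactor( vk::BlendFactor::eOneMinusSrcAlpha )
//                  .setColorBlendOp( vk::BlendOp::eAdd )
//                  .setSrcAlphaBlendFactor( vk::BlendFactor::eOne )
//                  .setDstAlphaBlendFactor( vk::BlendFactor::eZero )
//                  .setAlphaBlendOp( vk::BlendOp::eAdd )
//                  .setColorWriteMask( vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG
//                                      | vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA );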
  30339. struct PipelineColorBlendStateCreateInfo
  30340. {
  30341. static const bool allowDuplicate = false;
  30342. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendStateCreateInfo;
  30343. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  30344. VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ = {}, VULKAN_HPP_NAMESPACE::LogicOp logicOp_ = VULKAN_HPP_NAMESPACE::LogicOp::eClear, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ = {}, std::array<float,4> const& blendConstants_ = {}) VULKAN_HPP_NOEXCEPT
  30345. : flags( flags_ ), logicOpEnable( logicOpEnable_ ), logicOp( logicOp_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), blendConstants( blendConstants_ )
  30346. {}
  30347. VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  30348. PipelineColorBlendStateCreateInfo( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  30349. : PipelineColorBlendStateCreateInfo( *reinterpret_cast<PipelineColorBlendStateCreateInfo const *>( &rhs ) )
  30350. {}
  30351. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  30352. PipelineColorBlendStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_, VULKAN_HPP_NAMESPACE::LogicOp logicOp_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_, std::array<float,4> const& blendConstants_ = {} )
  30353. : flags( flags_ ), logicOpEnable( logicOpEnable_ ), logicOp( logicOp_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), blendConstants( blendConstants_ )
  30354. {}
  30355. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  30356. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  30357. VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendStateCreateInfo & operator=( PipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  30358. PipelineColorBlendStateCreateInfo & operator=( VkPipelineColorBlendStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  30359. {
  30360. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo const *>( &rhs );
  30361. return *this;
  30362. }
  30363. PipelineColorBlendStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  30364. {
  30365. pNext = pNext_;
  30366. return *this;
  30367. }
  30368. PipelineColorBlendStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  30369. {
  30370. flags = flags_;
  30371. return *this;
  30372. }
  30373. PipelineColorBlendStateCreateInfo & setLogicOpEnable( VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable_ ) VULKAN_HPP_NOEXCEPT
  30374. {
  30375. logicOpEnable = logicOpEnable_;
  30376. return *this;
  30377. }
  30378. PipelineColorBlendStateCreateInfo & setLogicOp( VULKAN_HPP_NAMESPACE::LogicOp logicOp_ ) VULKAN_HPP_NOEXCEPT
  30379. {
  30380. logicOp = logicOp_;
  30381. return *this;
  30382. }
  30383. PipelineColorBlendStateCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
  30384. {
  30385. attachmentCount = attachmentCount_;
  30386. return *this;
  30387. }
  30388. PipelineColorBlendStateCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments_ ) VULKAN_HPP_NOEXCEPT
  30389. {
  30390. pAttachments = pAttachments_;
  30391. return *this;
  30392. }
  30393. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  30394. PipelineColorBlendStateCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState> const & attachments_ ) VULKAN_HPP_NOEXCEPT
  30395. {
  30396. attachmentCount = static_cast<uint32_t>( attachments_.size() );
  30397. pAttachments = attachments_.data();
  30398. return *this;
  30399. }
  30400. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  30401. PipelineColorBlendStateCreateInfo & setBlendConstants( std::array<float,4> blendConstants_ ) VULKAN_HPP_NOEXCEPT
  30402. {
  30403. blendConstants = blendConstants_;
  30404. return *this;
  30405. }
  30406. operator VkPipelineColorBlendStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  30407. {
  30408. return *reinterpret_cast<const VkPipelineColorBlendStateCreateInfo*>( this );
  30409. }
  30410. operator VkPipelineColorBlendStateCreateInfo &() VULKAN_HPP_NOEXCEPT
  30411. {
  30412. return *reinterpret_cast<VkPipelineColorBlendStateCreateInfo*>( this );
  30413. }
  30414. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  30415. auto operator<=>( PipelineColorBlendStateCreateInfo const& ) const = default;
  30416. #else
  30417. bool operator==( PipelineColorBlendStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  30418. {
  30419. return ( sType == rhs.sType )
  30420. && ( pNext == rhs.pNext )
  30421. && ( flags == rhs.flags )
  30422. && ( logicOpEnable == rhs.logicOpEnable )
  30423. && ( logicOp == rhs.logicOp )
  30424. && ( attachmentCount == rhs.attachmentCount )
  30425. && ( pAttachments == rhs.pAttachments )
  30426. && ( blendConstants == rhs.blendConstants );
  30427. }
  30428. bool operator!=( PipelineColorBlendStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  30429. {
  30430. return !operator==( rhs );
  30431. }
  30432. #endif
  30433. public:
  30434. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendStateCreateInfo;
  30435. const void* pNext = {};
  30436. VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags flags = {};
  30437. VULKAN_HPP_NAMESPACE::Bool32 logicOpEnable = {};
  30438. VULKAN_HPP_NAMESPACE::LogicOp logicOp = VULKAN_HPP_NAMESPACE::LogicOp::eClear;
  30439. uint32_t attachmentCount = {};
  30440. const VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState* pAttachments = {};
  30441. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 4> blendConstants = {};
  30442. };
  30443. static_assert( sizeof( PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ), "struct and wrapper have different size!" );
  30444. static_assert( std::is_standard_layout<PipelineColorBlendStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
  30445. template <>
  30446. struct CppType<StructureType, StructureType::ePipelineColorBlendStateCreateInfo>
  30447. {
  30448. using Type = PipelineColorBlendStateCreateInfo;
  30449. };
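// --- Illustrative usage sketch (not part of the generated header) ---
// When VULKAN_HPP_DISABLE_ENHANCED_MODE is not defined, the ArrayProxy-based setAttachments
// overload fills attachmentCount and pAttachments together from a named object or container
// (ArrayProxyNoTemporaries deliberately rejects temporaries). `blendAttachment` here is the
// attachment state from the sketch above:
//
//   vk::PipelineColorBlendStateCreateInfo colorBlendState;
//   colorBlendState.setAttachments( blendAttachment )      // attachmentCount = 1, pAttachments = &blendAttachment
//                  .setLogicOpEnable( VK_FALSE )
//                  .setBlendConstants( { 0.0f, 0.0f, 0.0f, 0.0f } );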
  30450. struct PipelineDynamicStateCreateInfo
  30451. {
  30452. static const bool allowDuplicate = false;
  30453. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDynamicStateCreateInfo;
  30454. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  30455. VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo(VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ = {}, uint32_t dynamicStateCount_ = {}, const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ = {}) VULKAN_HPP_NOEXCEPT
  30456. : flags( flags_ ), dynamicStateCount( dynamicStateCount_ ), pDynamicStates( pDynamicStates_ )
  30457. {}
  30458. VULKAN_HPP_CONSTEXPR PipelineDynamicStateCreateInfo( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  30459. PipelineDynamicStateCreateInfo( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  30460. : PipelineDynamicStateCreateInfo( *reinterpret_cast<PipelineDynamicStateCreateInfo const *>( &rhs ) )
  30461. {}
  30462. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  30463. PipelineDynamicStateCreateInfo( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_ )
  30464. : flags( flags_ ), dynamicStateCount( static_cast<uint32_t>( dynamicStates_.size() ) ), pDynamicStates( dynamicStates_.data() )
  30465. {}
  30466. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  30467. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  30468. VULKAN_HPP_CONSTEXPR_14 PipelineDynamicStateCreateInfo & operator=( PipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  30469. PipelineDynamicStateCreateInfo & operator=( VkPipelineDynamicStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  30470. {
  30471. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo const *>( &rhs );
  30472. return *this;
  30473. }
  30474. PipelineDynamicStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  30475. {
  30476. pNext = pNext_;
  30477. return *this;
  30478. }
  30479. PipelineDynamicStateCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  30480. {
  30481. flags = flags_;
  30482. return *this;
  30483. }
  30484. PipelineDynamicStateCreateInfo & setDynamicStateCount( uint32_t dynamicStateCount_ ) VULKAN_HPP_NOEXCEPT
  30485. {
  30486. dynamicStateCount = dynamicStateCount_;
  30487. return *this;
  30488. }
  30489. PipelineDynamicStateCreateInfo & setPDynamicStates( const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates_ ) VULKAN_HPP_NOEXCEPT
  30490. {
  30491. pDynamicStates = pDynamicStates_;
  30492. return *this;
  30493. }
  30494. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  30495. PipelineDynamicStateCreateInfo & setDynamicStates( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DynamicState> const & dynamicStates_ ) VULKAN_HPP_NOEXCEPT
  30496. {
  30497. dynamicStateCount = static_cast<uint32_t>( dynamicStates_.size() );
  30498. pDynamicStates = dynamicStates_.data();
  30499. return *this;
  30500. }
  30501. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  30502. operator VkPipelineDynamicStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  30503. {
  30504. return *reinterpret_cast<const VkPipelineDynamicStateCreateInfo*>( this );
  30505. }
  30506. operator VkPipelineDynamicStateCreateInfo &() VULKAN_HPP_NOEXCEPT
  30507. {
  30508. return *reinterpret_cast<VkPipelineDynamicStateCreateInfo*>( this );
  30509. }
  30510. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  30511. auto operator<=>( PipelineDynamicStateCreateInfo const& ) const = default;
  30512. #else
  30513. bool operator==( PipelineDynamicStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  30514. {
  30515. return ( sType == rhs.sType )
  30516. && ( pNext == rhs.pNext )
  30517. && ( flags == rhs.flags )
  30518. && ( dynamicStateCount == rhs.dynamicStateCount )
  30519. && ( pDynamicStates == rhs.pDynamicStates );
  30520. }
  30521. bool operator!=( PipelineDynamicStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  30522. {
  30523. return !operator==( rhs );
  30524. }
  30525. #endif
  30526. public:
  30527. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDynamicStateCreateInfo;
  30528. const void* pNext = {};
  30529. VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags flags = {};
  30530. uint32_t dynamicStateCount = {};
  30531. const VULKAN_HPP_NAMESPACE::DynamicState* pDynamicStates = {};
  30532. };
  30533. static_assert( sizeof( PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ), "struct and wrapper have different size!" );
  30534. static_assert( std::is_standard_layout<PipelineDynamicStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
  30535. template <>
  30536. struct CppType<StructureType, StructureType::ePipelineDynamicStateCreateInfo>
  30537. {
  30538. using Type = PipelineDynamicStateCreateInfo;
  30539. };
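// --- Illustrative usage sketch (not part of the generated header) ---
// Viewport and scissor are commonly left dynamic; the ArrayProxy setter derives both
// dynamicStateCount and pDynamicStates from a named array:
//
//   std::array<vk::DynamicState, 2> dynamicStates = { vk::DynamicState::eViewport, vk::DynamicState::eScissor };
//   vk::PipelineDynamicStateCreateInfo dynamicState;
//   dynamicState.setDynamicStates( dynamicStates );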
  30540. struct GraphicsPipelineCreateInfo
  30541. {
  30542. static const bool allowDuplicate = false;
  30543. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineCreateInfo;
  30544. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  30545. VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
  30546. : flags( flags_ ), stageCount( stageCount_ ), pStages( pStages_ ), pVertexInputState( pVertexInputState_ ), pInputAssemblyState( pInputAssemblyState_ ), pTessellationState( pTessellationState_ ), pViewportState( pViewportState_ ), pRasterizationState( pRasterizationState_ ), pMultisampleState( pMultisampleState_ ), pDepthStencilState( pDepthStencilState_ ), pColorBlendState( pColorBlendState_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), renderPass( renderPass_ ), subpass( subpass_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
  30547. {}
  30548. VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  30549. GraphicsPipelineCreateInfo( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  30550. : GraphicsPipelineCreateInfo( *reinterpret_cast<GraphicsPipelineCreateInfo const *>( &rhs ) )
  30551. {}
  30552. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  30553. GraphicsPipelineCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::RenderPass renderPass_ = {}, uint32_t subpass_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {} )
  30554. : flags( flags_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), pVertexInputState( pVertexInputState_ ), pInputAssemblyState( pInputAssemblyState_ ), pTessellationState( pTessellationState_ ), pViewportState( pViewportState_ ), pRasterizationState( pRasterizationState_ ), pMultisampleState( pMultisampleState_ ), pDepthStencilState( pDepthStencilState_ ), pColorBlendState( pColorBlendState_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), renderPass( renderPass_ ), subpass( subpass_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
  30555. {}
  30556. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  30557. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  30558. VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineCreateInfo & operator=( GraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  30559. GraphicsPipelineCreateInfo & operator=( VkGraphicsPipelineCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  30560. {
  30561. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo const *>( &rhs );
  30562. return *this;
  30563. }
  30564. GraphicsPipelineCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  30565. {
  30566. pNext = pNext_;
  30567. return *this;
  30568. }
  30569. GraphicsPipelineCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  30570. {
  30571. flags = flags_;
  30572. return *this;
  30573. }
  30574. GraphicsPipelineCreateInfo & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
  30575. {
  30576. stageCount = stageCount_;
  30577. return *this;
  30578. }
  30579. GraphicsPipelineCreateInfo & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
  30580. {
  30581. pStages = pStages_;
  30582. return *this;
  30583. }
  30584. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  30585. GraphicsPipelineCreateInfo & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
  30586. {
  30587. stageCount = static_cast<uint32_t>( stages_.size() );
  30588. pStages = stages_.data();
  30589. return *this;
  30590. }
  30591. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  30592. GraphicsPipelineCreateInfo & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
  30593. {
  30594. pVertexInputState = pVertexInputState_;
  30595. return *this;
  30596. }
  30597. GraphicsPipelineCreateInfo & setPInputAssemblyState( const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState_ ) VULKAN_HPP_NOEXCEPT
  30598. {
  30599. pInputAssemblyState = pInputAssemblyState_;
  30600. return *this;
  30601. }
  30602. GraphicsPipelineCreateInfo & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ ) VULKAN_HPP_NOEXCEPT
  30603. {
  30604. pTessellationState = pTessellationState_;
  30605. return *this;
  30606. }
  30607. GraphicsPipelineCreateInfo & setPViewportState( const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState_ ) VULKAN_HPP_NOEXCEPT
  30608. {
  30609. pViewportState = pViewportState_;
  30610. return *this;
  30611. }
  30612. GraphicsPipelineCreateInfo & setPRasterizationState( const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState_ ) VULKAN_HPP_NOEXCEPT
  30613. {
  30614. pRasterizationState = pRasterizationState_;
  30615. return *this;
  30616. }
  30617. GraphicsPipelineCreateInfo & setPMultisampleState( const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState_ ) VULKAN_HPP_NOEXCEPT
  30618. {
  30619. pMultisampleState = pMultisampleState_;
  30620. return *this;
  30621. }
  30622. GraphicsPipelineCreateInfo & setPDepthStencilState( const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState_ ) VULKAN_HPP_NOEXCEPT
  30623. {
  30624. pDepthStencilState = pDepthStencilState_;
  30625. return *this;
  30626. }
  30627. GraphicsPipelineCreateInfo & setPColorBlendState( const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState_ ) VULKAN_HPP_NOEXCEPT
  30628. {
  30629. pColorBlendState = pColorBlendState_;
  30630. return *this;
  30631. }
  30632. GraphicsPipelineCreateInfo & setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ ) VULKAN_HPP_NOEXCEPT
  30633. {
  30634. pDynamicState = pDynamicState_;
  30635. return *this;
  30636. }
  30637. GraphicsPipelineCreateInfo & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
  30638. {
  30639. layout = layout_;
  30640. return *this;
  30641. }
  30642. GraphicsPipelineCreateInfo & setRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass_ ) VULKAN_HPP_NOEXCEPT
  30643. {
  30644. renderPass = renderPass_;
  30645. return *this;
  30646. }
  30647. GraphicsPipelineCreateInfo & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
  30648. {
  30649. subpass = subpass_;
  30650. return *this;
  30651. }
  30652. GraphicsPipelineCreateInfo & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
  30653. {
  30654. basePipelineHandle = basePipelineHandle_;
  30655. return *this;
  30656. }
  30657. GraphicsPipelineCreateInfo & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
  30658. {
  30659. basePipelineIndex = basePipelineIndex_;
  30660. return *this;
  30661. }
  30662. operator VkGraphicsPipelineCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  30663. {
  30664. return *reinterpret_cast<const VkGraphicsPipelineCreateInfo*>( this );
  30665. }
  30666. operator VkGraphicsPipelineCreateInfo &() VULKAN_HPP_NOEXCEPT
  30667. {
  30668. return *reinterpret_cast<VkGraphicsPipelineCreateInfo*>( this );
  30669. }
  30670. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  30671. auto operator<=>( GraphicsPipelineCreateInfo const& ) const = default;
  30672. #else
  30673. bool operator==( GraphicsPipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  30674. {
  30675. return ( sType == rhs.sType )
  30676. && ( pNext == rhs.pNext )
  30677. && ( flags == rhs.flags )
  30678. && ( stageCount == rhs.stageCount )
  30679. && ( pStages == rhs.pStages )
  30680. && ( pVertexInputState == rhs.pVertexInputState )
  30681. && ( pInputAssemblyState == rhs.pInputAssemblyState )
  30682. && ( pTessellationState == rhs.pTessellationState )
  30683. && ( pViewportState == rhs.pViewportState )
  30684. && ( pRasterizationState == rhs.pRasterizationState )
  30685. && ( pMultisampleState == rhs.pMultisampleState )
  30686. && ( pDepthStencilState == rhs.pDepthStencilState )
  30687. && ( pColorBlendState == rhs.pColorBlendState )
  30688. && ( pDynamicState == rhs.pDynamicState )
  30689. && ( layout == rhs.layout )
  30690. && ( renderPass == rhs.renderPass )
  30691. && ( subpass == rhs.subpass )
  30692. && ( basePipelineHandle == rhs.basePipelineHandle )
  30693. && ( basePipelineIndex == rhs.basePipelineIndex );
  30694. }
  30695. bool operator!=( GraphicsPipelineCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  30696. {
  30697. return !operator==( rhs );
  30698. }
  30699. #endif
  30700. public:
  30701. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineCreateInfo;
  30702. const void* pNext = {};
  30703. VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
  30704. uint32_t stageCount = {};
  30705. const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {};
  30706. const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState = {};
  30707. const VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo* pInputAssemblyState = {};
  30708. const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState = {};
  30709. const VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo* pViewportState = {};
  30710. const VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo* pRasterizationState = {};
  30711. const VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo* pMultisampleState = {};
  30712. const VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo* pDepthStencilState = {};
  30713. const VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo* pColorBlendState = {};
  30714. const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState = {};
  30715. VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
  30716. VULKAN_HPP_NAMESPACE::RenderPass renderPass = {};
  30717. uint32_t subpass = {};
  30718. VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
  30719. int32_t basePipelineIndex = {};
  30720. };
  30721. static_assert( sizeof( GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ), "struct and wrapper have different size!" );
  30722. static_assert( std::is_standard_layout<GraphicsPipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
  30723. template <>
  30724. struct CppType<StructureType, StructureType::eGraphicsPipelineCreateInfo>
  30725. {
  30726. using Type = GraphicsPipelineCreateInfo;
  30727. };
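// --- Illustrative usage sketch (not part of the generated header) ---
// Assembling the create info from the state structs sketched above. `shaderStages` (a named
// std::vector<vk::PipelineShaderStageCreateInfo>), `vertexInputState`, `inputAssemblyState`,
// `viewportState`, `pipelineLayout` and `renderPass` are placeholders assumed to have been
// created elsewhere:
//
//   vk::GraphicsPipelineCreateInfo pipelineCreateInfo;
//   pipelineCreateInfo.setStages( shaderStages )
//                     .setPVertexInputState( &vertexInputState )
//                     .setPInputAssemblyState( &inputAssemblyState )
//                     .setPViewportState( &viewportState )
//                     .setPRasterizationState( &rasterizationState )
//                     .setPMultisampleState( &multisampleState )
//                     .setPDepthStencilState( &depthStencilState )
//                     .setPColorBlendState( &colorBlendState )
//                     .setPDynamicState( &dynamicState )
//                     .setLayout( pipelineLayout )
//                     .setRenderPass( renderPass )
//                     .setSubpass( 0 );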
  30728. struct ImageCreateInfo
  30729. {
  30730. static const bool allowDuplicate = false;
  30731. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageCreateInfo;
  30732. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  30733. VULKAN_HPP_CONSTEXPR ImageCreateInfo(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ImageType imageType_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::Extent3D extent_ = {}, uint32_t mipLevels_ = {}, uint32_t arrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t* pQueueFamilyIndices_ = {}, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined) VULKAN_HPP_NOEXCEPT
  30734. : flags( flags_ ), imageType( imageType_ ), format( format_ ), extent( extent_ ), mipLevels( mipLevels_ ), arrayLayers( arrayLayers_ ), samples( samples_ ), tiling( tiling_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ ), initialLayout( initialLayout_ )
  30735. {}
  30736. VULKAN_HPP_CONSTEXPR ImageCreateInfo( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  30737. ImageCreateInfo( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  30738. : ImageCreateInfo( *reinterpret_cast<ImageCreateInfo const *>( &rhs ) )
  30739. {}
  30740. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  30741. ImageCreateInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, VULKAN_HPP_NAMESPACE::ImageType imageType_, VULKAN_HPP_NAMESPACE::Format format_, VULKAN_HPP_NAMESPACE::Extent3D extent_, uint32_t mipLevels_, uint32_t arrayLayers_, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_, VULKAN_HPP_NAMESPACE::ImageTiling tiling_, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_, VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined )
  30742. : flags( flags_ ), imageType( imageType_ ), format( format_ ), extent( extent_ ), mipLevels( mipLevels_ ), arrayLayers( arrayLayers_ ), samples( samples_ ), tiling( tiling_ ), usage( usage_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() ), initialLayout( initialLayout_ )
  30743. {}
  30744. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  30745. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  30746. VULKAN_HPP_CONSTEXPR_14 ImageCreateInfo & operator=( ImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  30747. ImageCreateInfo & operator=( VkImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  30748. {
  30749. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageCreateInfo const *>( &rhs );
  30750. return *this;
  30751. }
  30752. ImageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  30753. {
  30754. pNext = pNext_;
  30755. return *this;
  30756. }
  ImageCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  {
    flags = flags_;
    return *this;
  }
  ImageCreateInfo & setImageType( VULKAN_HPP_NAMESPACE::ImageType imageType_ ) VULKAN_HPP_NOEXCEPT
  {
    imageType = imageType_;
    return *this;
  }
  ImageCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
  {
    format = format_;
    return *this;
  }
  ImageCreateInfo & setExtent( VULKAN_HPP_NAMESPACE::Extent3D const & extent_ ) VULKAN_HPP_NOEXCEPT
  {
    extent = extent_;
    return *this;
  }
  ImageCreateInfo & setMipLevels( uint32_t mipLevels_ ) VULKAN_HPP_NOEXCEPT
  {
    mipLevels = mipLevels_;
    return *this;
  }
  ImageCreateInfo & setArrayLayers( uint32_t arrayLayers_ ) VULKAN_HPP_NOEXCEPT
  {
    arrayLayers = arrayLayers_;
    return *this;
  }
  ImageCreateInfo & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
  {
    samples = samples_;
    return *this;
  }
  ImageCreateInfo & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
  {
    tiling = tiling_;
    return *this;
  }
  ImageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
  {
    usage = usage_;
    return *this;
  }
  ImageCreateInfo & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
  {
    sharingMode = sharingMode_;
    return *this;
  }
  ImageCreateInfo & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
  {
    queueFamilyIndexCount = queueFamilyIndexCount_;
    return *this;
  }
  ImageCreateInfo & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
  {
    pQueueFamilyIndices = pQueueFamilyIndices_;
    return *this;
  }
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  ImageCreateInfo & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
  {
    queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
    pQueueFamilyIndices = queueFamilyIndices_.data();
    return *this;
  }
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  ImageCreateInfo & setInitialLayout( VULKAN_HPP_NAMESPACE::ImageLayout initialLayout_ ) VULKAN_HPP_NOEXCEPT
  {
    initialLayout = initialLayout_;
    return *this;
  }
  operator VkImageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<const VkImageCreateInfo*>( this );
  }
  operator VkImageCreateInfo &() VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<VkImageCreateInfo*>( this );
  }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  auto operator<=>( ImageCreateInfo const& ) const = default;
#else
  bool operator==( ImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return ( sType == rhs.sType )
        && ( pNext == rhs.pNext )
        && ( flags == rhs.flags )
        && ( imageType == rhs.imageType )
        && ( format == rhs.format )
        && ( extent == rhs.extent )
        && ( mipLevels == rhs.mipLevels )
        && ( arrayLayers == rhs.arrayLayers )
        && ( samples == rhs.samples )
        && ( tiling == rhs.tiling )
        && ( usage == rhs.usage )
        && ( sharingMode == rhs.sharingMode )
        && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
        && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
        && ( initialLayout == rhs.initialLayout );
  }
  bool operator!=( ImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return !operator==( rhs );
  }
#endif
public:
  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageCreateInfo;
  const void* pNext = {};
  VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
  VULKAN_HPP_NAMESPACE::ImageType imageType = VULKAN_HPP_NAMESPACE::ImageType::e1D;
  VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  VULKAN_HPP_NAMESPACE::Extent3D extent = {};
  uint32_t mipLevels = {};
  uint32_t arrayLayers = {};
  VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
  VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
  VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
  VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
  uint32_t queueFamilyIndexCount = {};
  const uint32_t* pQueueFamilyIndices = {};
  VULKAN_HPP_NAMESPACE::ImageLayout initialLayout = VULKAN_HPP_NAMESPACE::ImageLayout::eUndefined;
};
static_assert( sizeof( ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eImageCreateInfo>
{
  using Type = ImageCreateInfo;
};
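// Illustrative usage sketch (not part of the generated header): building an
// ImageCreateInfo with the fluent setters above. It assumes the default
// VULKAN_HPP_NAMESPACE of `vk`; the 800x600 color-attachment values are
// hypothetical placeholders.
//
//   vk::ImageCreateInfo imageInfo = vk::ImageCreateInfo{}
//                                     .setImageType( vk::ImageType::e2D )
//                                     .setFormat( vk::Format::eB8G8R8A8Unorm )
//                                     .setExtent( vk::Extent3D( 800, 600, 1 ) )
//                                     .setMipLevels( 1 )
//                                     .setArrayLayers( 1 )
//                                     .setSamples( vk::SampleCountFlagBits::e1 )
//                                     .setTiling( vk::ImageTiling::eOptimal )
//                                     .setUsage( vk::ImageUsageFlagBits::eColorAttachment )
//                                     .setSharingMode( vk::SharingMode::eExclusive )
//                                     .setInitialLayout( vk::ImageLayout::eUndefined );
//
// Because of the conversion operators above, imageInfo can be passed wherever a
// VkImageCreateInfo const& is expected; sType and pNext are already filled in.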
struct ImageViewCreateInfo
{
  static const bool allowDuplicate = false;
  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR ImageViewCreateInfo(VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::ImageViewType viewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange_ = {}) VULKAN_HPP_NOEXCEPT
    : flags( flags_ ), image( image_ ), viewType( viewType_ ), format( format_ ), components( components_ ), subresourceRange( subresourceRange_ )
  {}
  VULKAN_HPP_CONSTEXPR ImageViewCreateInfo( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  ImageViewCreateInfo( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    : ImageViewCreateInfo( *reinterpret_cast<ImageViewCreateInfo const *>( &rhs ) )
  {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR_14 ImageViewCreateInfo & operator=( ImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  ImageViewCreateInfo & operator=( VkImageViewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  {
    *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo const *>( &rhs );
    return *this;
  }
  ImageViewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  {
    pNext = pNext_;
    return *this;
  }
  ImageViewCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  {
    flags = flags_;
    return *this;
  }
  ImageViewCreateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
  {
    image = image_;
    return *this;
  }
  ImageViewCreateInfo & setViewType( VULKAN_HPP_NAMESPACE::ImageViewType viewType_ ) VULKAN_HPP_NOEXCEPT
  {
    viewType = viewType_;
    return *this;
  }
  ImageViewCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
  {
    format = format_;
    return *this;
  }
  ImageViewCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
  {
    components = components_;
    return *this;
  }
  ImageViewCreateInfo & setSubresourceRange( VULKAN_HPP_NAMESPACE::ImageSubresourceRange const & subresourceRange_ ) VULKAN_HPP_NOEXCEPT
  {
    subresourceRange = subresourceRange_;
    return *this;
  }
  operator VkImageViewCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<const VkImageViewCreateInfo*>( this );
  }
  operator VkImageViewCreateInfo &() VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<VkImageViewCreateInfo*>( this );
  }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  auto operator<=>( ImageViewCreateInfo const& ) const = default;
#else
  bool operator==( ImageViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return ( sType == rhs.sType )
        && ( pNext == rhs.pNext )
        && ( flags == rhs.flags )
        && ( image == rhs.image )
        && ( viewType == rhs.viewType )
        && ( format == rhs.format )
        && ( components == rhs.components )
        && ( subresourceRange == rhs.subresourceRange );
  }
  bool operator!=( ImageViewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return !operator==( rhs );
  }
#endif
public:
  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewCreateInfo;
  const void* pNext = {};
  VULKAN_HPP_NAMESPACE::ImageViewCreateFlags flags = {};
  VULKAN_HPP_NAMESPACE::Image image = {};
  VULKAN_HPP_NAMESPACE::ImageViewType viewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
  VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
  VULKAN_HPP_NAMESPACE::ImageSubresourceRange subresourceRange = {};
};
static_assert( sizeof( ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eImageViewCreateInfo>
{
  using Type = ImageViewCreateInfo;
};
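// Illustrative usage sketch (not part of the generated header), assuming the
// default `vk` namespace and an `image` handle created elsewhere; format and
// subresource values are hypothetical placeholders.
//
//   vk::ImageViewCreateInfo viewInfo = vk::ImageViewCreateInfo{}
//                                        .setImage( image )
//                                        .setViewType( vk::ImageViewType::e2D )
//                                        .setFormat( vk::Format::eB8G8R8A8Unorm )
//                                        .setSubresourceRange( vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) );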
struct IndirectCommandsLayoutTokenNV
{
  static const bool allowDuplicate = false;
  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutTokenNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV(VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup, uint32_t stream_ = {}, uint32_t offset_ = {}, uint32_t vertexBindingUnit_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ = {}, uint32_t pushconstantOffset_ = {}, uint32_t pushconstantSize_ = {}, VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ = {}, uint32_t indexTypeCount_ = {}, const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes_ = {}, const uint32_t* pIndexTypeValues_ = {}) VULKAN_HPP_NOEXCEPT
    : tokenType( tokenType_ ), stream( stream_ ), offset( offset_ ), vertexBindingUnit( vertexBindingUnit_ ), vertexDynamicStride( vertexDynamicStride_ ), pushconstantPipelineLayout( pushconstantPipelineLayout_ ), pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ), pushconstantOffset( pushconstantOffset_ ), pushconstantSize( pushconstantSize_ ), indirectStateFlags( indirectStateFlags_ ), indexTypeCount( indexTypeCount_ ), pIndexTypes( pIndexTypes_ ), pIndexTypeValues( pIndexTypeValues_ )
  {}
  VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutTokenNV( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  IndirectCommandsLayoutTokenNV( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
    : IndirectCommandsLayoutTokenNV( *reinterpret_cast<IndirectCommandsLayoutTokenNV const *>( &rhs ) )
  {}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  IndirectCommandsLayoutTokenNV( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_, uint32_t stream_, uint32_t offset_, uint32_t vertexBindingUnit_, VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_, VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_, VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_, uint32_t pushconstantOffset_, uint32_t pushconstantSize_, VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndexType> const & indexTypes_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & indexTypeValues_ = {} )
    : tokenType( tokenType_ ), stream( stream_ ), offset( offset_ ), vertexBindingUnit( vertexBindingUnit_ ), vertexDynamicStride( vertexDynamicStride_ ), pushconstantPipelineLayout( pushconstantPipelineLayout_ ), pushconstantShaderStageFlags( pushconstantShaderStageFlags_ ), pushconstantOffset( pushconstantOffset_ ), pushconstantSize( pushconstantSize_ ), indirectStateFlags( indirectStateFlags_ ), indexTypeCount( static_cast<uint32_t>( indexTypes_.size() ) ), pIndexTypes( indexTypes_.data() ), pIndexTypeValues( indexTypeValues_.data() )
  {
#ifdef VULKAN_HPP_NO_EXCEPTIONS
    VULKAN_HPP_ASSERT( indexTypes_.size() == indexTypeValues_.size() );
#else
    if ( indexTypes_.size() != indexTypeValues_.size() )
    {
      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::IndirectCommandsLayoutTokenNV::IndirectCommandsLayoutTokenNV: indexTypes_.size() != indexTypeValues_.size()" );
    }
#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  }
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutTokenNV & operator=( IndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  IndirectCommandsLayoutTokenNV & operator=( VkIndirectCommandsLayoutTokenNV const & rhs ) VULKAN_HPP_NOEXCEPT
  {
    *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV const *>( &rhs );
    return *this;
  }
  IndirectCommandsLayoutTokenNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  {
    pNext = pNext_;
    return *this;
  }
  IndirectCommandsLayoutTokenNV & setTokenType( VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType_ ) VULKAN_HPP_NOEXCEPT
  {
    tokenType = tokenType_;
    return *this;
  }
  IndirectCommandsLayoutTokenNV & setStream( uint32_t stream_ ) VULKAN_HPP_NOEXCEPT
  {
    stream = stream_;
    return *this;
  }
  IndirectCommandsLayoutTokenNV & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
  {
    offset = offset_;
    return *this;
  }
  IndirectCommandsLayoutTokenNV & setVertexBindingUnit( uint32_t vertexBindingUnit_ ) VULKAN_HPP_NOEXCEPT
  {
    vertexBindingUnit = vertexBindingUnit_;
    return *this;
  }
  IndirectCommandsLayoutTokenNV & setVertexDynamicStride( VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride_ ) VULKAN_HPP_NOEXCEPT
  {
    vertexDynamicStride = vertexDynamicStride_;
    return *this;
  }
  IndirectCommandsLayoutTokenNV & setPushconstantPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout_ ) VULKAN_HPP_NOEXCEPT
  {
    pushconstantPipelineLayout = pushconstantPipelineLayout_;
    return *this;
  }
  IndirectCommandsLayoutTokenNV & setPushconstantShaderStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags_ ) VULKAN_HPP_NOEXCEPT
  {
    pushconstantShaderStageFlags = pushconstantShaderStageFlags_;
    return *this;
  }
  IndirectCommandsLayoutTokenNV & setPushconstantOffset( uint32_t pushconstantOffset_ ) VULKAN_HPP_NOEXCEPT
  {
    pushconstantOffset = pushconstantOffset_;
    return *this;
  }
  IndirectCommandsLayoutTokenNV & setPushconstantSize( uint32_t pushconstantSize_ ) VULKAN_HPP_NOEXCEPT
  {
    pushconstantSize = pushconstantSize_;
    return *this;
  }
  IndirectCommandsLayoutTokenNV & setIndirectStateFlags( VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags_ ) VULKAN_HPP_NOEXCEPT
  {
    indirectStateFlags = indirectStateFlags_;
    return *this;
  }
  IndirectCommandsLayoutTokenNV & setIndexTypeCount( uint32_t indexTypeCount_ ) VULKAN_HPP_NOEXCEPT
  {
    indexTypeCount = indexTypeCount_;
    return *this;
  }
  IndirectCommandsLayoutTokenNV & setPIndexTypes( const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes_ ) VULKAN_HPP_NOEXCEPT
  {
    pIndexTypes = pIndexTypes_;
    return *this;
  }
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  IndirectCommandsLayoutTokenNV & setIndexTypes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndexType> const & indexTypes_ ) VULKAN_HPP_NOEXCEPT
  {
    indexTypeCount = static_cast<uint32_t>( indexTypes_.size() );
    pIndexTypes = indexTypes_.data();
    return *this;
  }
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  IndirectCommandsLayoutTokenNV & setPIndexTypeValues( const uint32_t* pIndexTypeValues_ ) VULKAN_HPP_NOEXCEPT
  {
    pIndexTypeValues = pIndexTypeValues_;
    return *this;
  }
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  IndirectCommandsLayoutTokenNV & setIndexTypeValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & indexTypeValues_ ) VULKAN_HPP_NOEXCEPT
  {
    indexTypeCount = static_cast<uint32_t>( indexTypeValues_.size() );
    pIndexTypeValues = indexTypeValues_.data();
    return *this;
  }
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  operator VkIndirectCommandsLayoutTokenNV const&() const VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<const VkIndirectCommandsLayoutTokenNV*>( this );
  }
  operator VkIndirectCommandsLayoutTokenNV &() VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<VkIndirectCommandsLayoutTokenNV*>( this );
  }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  auto operator<=>( IndirectCommandsLayoutTokenNV const& ) const = default;
#else
  bool operator==( IndirectCommandsLayoutTokenNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return ( sType == rhs.sType )
        && ( pNext == rhs.pNext )
        && ( tokenType == rhs.tokenType )
        && ( stream == rhs.stream )
        && ( offset == rhs.offset )
        && ( vertexBindingUnit == rhs.vertexBindingUnit )
        && ( vertexDynamicStride == rhs.vertexDynamicStride )
        && ( pushconstantPipelineLayout == rhs.pushconstantPipelineLayout )
        && ( pushconstantShaderStageFlags == rhs.pushconstantShaderStageFlags )
        && ( pushconstantOffset == rhs.pushconstantOffset )
        && ( pushconstantSize == rhs.pushconstantSize )
        && ( indirectStateFlags == rhs.indirectStateFlags )
        && ( indexTypeCount == rhs.indexTypeCount )
        && ( pIndexTypes == rhs.pIndexTypes )
        && ( pIndexTypeValues == rhs.pIndexTypeValues );
  }
  bool operator!=( IndirectCommandsLayoutTokenNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return !operator==( rhs );
  }
#endif
public:
  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutTokenNV;
  const void* pNext = {};
  VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV tokenType = VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV::eShaderGroup;
  uint32_t stream = {};
  uint32_t offset = {};
  uint32_t vertexBindingUnit = {};
  VULKAN_HPP_NAMESPACE::Bool32 vertexDynamicStride = {};
  VULKAN_HPP_NAMESPACE::PipelineLayout pushconstantPipelineLayout = {};
  VULKAN_HPP_NAMESPACE::ShaderStageFlags pushconstantShaderStageFlags = {};
  uint32_t pushconstantOffset = {};
  uint32_t pushconstantSize = {};
  VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV indirectStateFlags = {};
  uint32_t indexTypeCount = {};
  const VULKAN_HPP_NAMESPACE::IndexType* pIndexTypes = {};
  const uint32_t* pIndexTypeValues = {};
};
static_assert( sizeof( IndirectCommandsLayoutTokenNV ) == sizeof( VkIndirectCommandsLayoutTokenNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<IndirectCommandsLayoutTokenNV>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eIndirectCommandsLayoutTokenNV>
{
  using Type = IndirectCommandsLayoutTokenNV;
};
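// Illustrative usage sketch (not part of the generated header). The ArrayProxy
// setters above keep indexTypeCount in sync with the arrays, and the enhanced-mode
// constructor asserts/throws if indexTypes_ and indexTypeValues_ differ in size.
// Token type and values below are hypothetical placeholders for an index-buffer token.
//
//   std::array<vk::IndexType, 2> indexTypes  = { vk::IndexType::eUint16, vk::IndexType::eUint32 };
//   std::array<uint32_t, 2>      indexValues = { VK_INDEX_TYPE_UINT16, VK_INDEX_TYPE_UINT32 };
//   vk::IndirectCommandsLayoutTokenNV token = vk::IndirectCommandsLayoutTokenNV{}
//                                               .setTokenType( vk::IndirectCommandsTokenTypeNV::eIndexBuffer )
//                                               .setStream( 0 )
//                                               .setOffset( 0 )
//                                               .setIndexTypes( indexTypes )
//                                               .setIndexTypeValues( indexValues );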
struct IndirectCommandsLayoutCreateInfoNV
{
  static const bool allowDuplicate = false;
  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIndirectCommandsLayoutCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t tokenCount_ = {}, const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens_ = {}, uint32_t streamCount_ = {}, const uint32_t* pStreamStrides_ = {}) VULKAN_HPP_NOEXCEPT
    : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), tokenCount( tokenCount_ ), pTokens( pTokens_ ), streamCount( streamCount_ ), pStreamStrides( pStreamStrides_ )
  {}
  VULKAN_HPP_CONSTEXPR IndirectCommandsLayoutCreateInfoNV( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  IndirectCommandsLayoutCreateInfoNV( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    : IndirectCommandsLayoutCreateInfoNV( *reinterpret_cast<IndirectCommandsLayoutCreateInfoNV const *>( &rhs ) )
  {}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  IndirectCommandsLayoutCreateInfoNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const & tokens_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ = {} )
    : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), tokenCount( static_cast<uint32_t>( tokens_.size() ) ), pTokens( tokens_.data() ), streamCount( static_cast<uint32_t>( streamStrides_.size() ) ), pStreamStrides( streamStrides_.data() )
  {}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR_14 IndirectCommandsLayoutCreateInfoNV & operator=( IndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  IndirectCommandsLayoutCreateInfoNV & operator=( VkIndirectCommandsLayoutCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  {
    *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV const *>( &rhs );
    return *this;
  }
  IndirectCommandsLayoutCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  {
    pNext = pNext_;
    return *this;
  }
  IndirectCommandsLayoutCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
  {
    flags = flags_;
    return *this;
  }
  IndirectCommandsLayoutCreateInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
  {
    pipelineBindPoint = pipelineBindPoint_;
    return *this;
  }
  IndirectCommandsLayoutCreateInfoNV & setTokenCount( uint32_t tokenCount_ ) VULKAN_HPP_NOEXCEPT
  {
    tokenCount = tokenCount_;
    return *this;
  }
  IndirectCommandsLayoutCreateInfoNV & setPTokens( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens_ ) VULKAN_HPP_NOEXCEPT
  {
    pTokens = pTokens_;
    return *this;
  }
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  IndirectCommandsLayoutCreateInfoNV & setTokens( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV> const & tokens_ ) VULKAN_HPP_NOEXCEPT
  {
    tokenCount = static_cast<uint32_t>( tokens_.size() );
    pTokens = tokens_.data();
    return *this;
  }
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  IndirectCommandsLayoutCreateInfoNV & setStreamCount( uint32_t streamCount_ ) VULKAN_HPP_NOEXCEPT
  {
    streamCount = streamCount_;
    return *this;
  }
  IndirectCommandsLayoutCreateInfoNV & setPStreamStrides( const uint32_t* pStreamStrides_ ) VULKAN_HPP_NOEXCEPT
  {
    pStreamStrides = pStreamStrides_;
    return *this;
  }
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  IndirectCommandsLayoutCreateInfoNV & setStreamStrides( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & streamStrides_ ) VULKAN_HPP_NOEXCEPT
  {
    streamCount = static_cast<uint32_t>( streamStrides_.size() );
    pStreamStrides = streamStrides_.data();
    return *this;
  }
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  operator VkIndirectCommandsLayoutCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV*>( this );
  }
  operator VkIndirectCommandsLayoutCreateInfoNV &() VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<VkIndirectCommandsLayoutCreateInfoNV*>( this );
  }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  auto operator<=>( IndirectCommandsLayoutCreateInfoNV const& ) const = default;
#else
  bool operator==( IndirectCommandsLayoutCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return ( sType == rhs.sType )
        && ( pNext == rhs.pNext )
        && ( flags == rhs.flags )
        && ( pipelineBindPoint == rhs.pipelineBindPoint )
        && ( tokenCount == rhs.tokenCount )
        && ( pTokens == rhs.pTokens )
        && ( streamCount == rhs.streamCount )
        && ( pStreamStrides == rhs.pStreamStrides );
  }
  bool operator!=( IndirectCommandsLayoutCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return !operator==( rhs );
  }
#endif
public:
  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIndirectCommandsLayoutCreateInfoNV;
  const void* pNext = {};
  VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV flags = {};
  VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
  uint32_t tokenCount = {};
  const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV* pTokens = {};
  uint32_t streamCount = {};
  const uint32_t* pStreamStrides = {};
};
static_assert( sizeof( IndirectCommandsLayoutCreateInfoNV ) == sizeof( VkIndirectCommandsLayoutCreateInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<IndirectCommandsLayoutCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eIndirectCommandsLayoutCreateInfoNV>
{
  using Type = IndirectCommandsLayoutCreateInfoNV;
};
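// Illustrative usage sketch (not part of the generated header): the enhanced-mode
// constructor above derives tokenCount and streamCount from the proxies. `token`
// is assumed to be an IndirectCommandsLayoutTokenNV built as in the earlier sketch;
// the stride value is a hypothetical placeholder.
//
//   std::array<vk::IndirectCommandsLayoutTokenNV, 1> tokens        = { token };
//   std::array<uint32_t, 1>                          streamStrides = { sizeof( uint32_t ) };
//   vk::IndirectCommandsLayoutCreateInfoNV layoutInfo( {}, vk::PipelineBindPoint::eGraphics, tokens, streamStrides );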
struct PipelineCacheCreateInfo
{
  static const bool allowDuplicate = false;
  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCacheCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo(VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ = {}, size_t initialDataSize_ = {}, const void* pInitialData_ = {}) VULKAN_HPP_NOEXCEPT
    : flags( flags_ ), initialDataSize( initialDataSize_ ), pInitialData( pInitialData_ )
  {}
  VULKAN_HPP_CONSTEXPR PipelineCacheCreateInfo( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  PipelineCacheCreateInfo( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    : PipelineCacheCreateInfo( *reinterpret_cast<PipelineCacheCreateInfo const *>( &rhs ) )
  {}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  template <typename T>
  PipelineCacheCreateInfo( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ )
    : flags( flags_ ), initialDataSize( initialData_.size() * sizeof(T) ), pInitialData( initialData_.data() )
  {}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR_14 PipelineCacheCreateInfo & operator=( PipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  PipelineCacheCreateInfo & operator=( VkPipelineCacheCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  {
    *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo const *>( &rhs );
    return *this;
  }
  PipelineCacheCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  {
    pNext = pNext_;
    return *this;
  }
  PipelineCacheCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  {
    flags = flags_;
    return *this;
  }
  PipelineCacheCreateInfo & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
  {
    initialDataSize = initialDataSize_;
    return *this;
  }
  PipelineCacheCreateInfo & setPInitialData( const void* pInitialData_ ) VULKAN_HPP_NOEXCEPT
  {
    pInitialData = pInitialData_;
    return *this;
  }
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  template <typename T>
  PipelineCacheCreateInfo & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
  {
    initialDataSize = initialData_.size() * sizeof(T);
    pInitialData = initialData_.data();
    return *this;
  }
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  operator VkPipelineCacheCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<const VkPipelineCacheCreateInfo*>( this );
  }
  operator VkPipelineCacheCreateInfo &() VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<VkPipelineCacheCreateInfo*>( this );
  }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  auto operator<=>( PipelineCacheCreateInfo const& ) const = default;
#else
  bool operator==( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return ( sType == rhs.sType )
        && ( pNext == rhs.pNext )
        && ( flags == rhs.flags )
        && ( initialDataSize == rhs.initialDataSize )
        && ( pInitialData == rhs.pInitialData );
  }
  bool operator!=( PipelineCacheCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return !operator==( rhs );
  }
#endif
public:
  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCacheCreateInfo;
  const void* pNext = {};
  VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags flags = {};
  size_t initialDataSize = {};
  const void* pInitialData = {};
};
static_assert( sizeof( PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineCacheCreateInfo>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePipelineCacheCreateInfo>
{
  using Type = PipelineCacheCreateInfo;
};
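// Illustrative usage sketch (not part of the generated header): seeding the cache
// from a previously serialized blob. `loadPipelineCacheBlob()` is a hypothetical
// helper returning a std::vector<uint8_t>; the explicit <uint8_t> template argument
// avoids having to deduce T through the ArrayProxyNoTemporaries parameter.
//
//   std::vector<uint8_t> cacheBlob = loadPipelineCacheBlob();
//   vk::PipelineCacheCreateInfo cacheInfo;
//   cacheInfo.setInitialData<uint8_t>( cacheBlob );   // fills initialDataSize and pInitialData together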
struct PushConstantRange
{
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR PushConstantRange(VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ = {}, uint32_t offset_ = {}, uint32_t size_ = {}) VULKAN_HPP_NOEXCEPT
    : stageFlags( stageFlags_ ), offset( offset_ ), size( size_ )
  {}
  VULKAN_HPP_CONSTEXPR PushConstantRange( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  PushConstantRange( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
    : PushConstantRange( *reinterpret_cast<PushConstantRange const *>( &rhs ) )
  {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR_14 PushConstantRange & operator=( PushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  PushConstantRange & operator=( VkPushConstantRange const & rhs ) VULKAN_HPP_NOEXCEPT
  {
    *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PushConstantRange const *>( &rhs );
    return *this;
  }
  PushConstantRange & setStageFlags( VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags_ ) VULKAN_HPP_NOEXCEPT
  {
    stageFlags = stageFlags_;
    return *this;
  }
  PushConstantRange & setOffset( uint32_t offset_ ) VULKAN_HPP_NOEXCEPT
  {
    offset = offset_;
    return *this;
  }
  PushConstantRange & setSize( uint32_t size_ ) VULKAN_HPP_NOEXCEPT
  {
    size = size_;
    return *this;
  }
  operator VkPushConstantRange const&() const VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<const VkPushConstantRange*>( this );
  }
  operator VkPushConstantRange &() VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<VkPushConstantRange*>( this );
  }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  auto operator<=>( PushConstantRange const& ) const = default;
#else
  bool operator==( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return ( stageFlags == rhs.stageFlags )
        && ( offset == rhs.offset )
        && ( size == rhs.size );
  }
  bool operator!=( PushConstantRange const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return !operator==( rhs );
  }
#endif
public:
  VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags = {};
  uint32_t offset = {};
  uint32_t size = {};
};
static_assert( sizeof( PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PushConstantRange>::value, "struct wrapper is not a standard layout!" );
struct PipelineLayoutCreateInfo
{
  static const bool allowDuplicate = false;
  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLayoutCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo(VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ = {}, uint32_t setLayoutCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ = {}, uint32_t pushConstantRangeCount_ = {}, const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ = {}) VULKAN_HPP_NOEXCEPT
    : flags( flags_ ), setLayoutCount( setLayoutCount_ ), pSetLayouts( pSetLayouts_ ), pushConstantRangeCount( pushConstantRangeCount_ ), pPushConstantRanges( pPushConstantRanges_ )
  {}
  VULKAN_HPP_CONSTEXPR PipelineLayoutCreateInfo( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  PipelineLayoutCreateInfo( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    : PipelineLayoutCreateInfo( *reinterpret_cast<PipelineLayoutCreateInfo const *>( &rhs ) )
  {}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  PipelineLayoutCreateInfo( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ = {} )
    : flags( flags_ ), setLayoutCount( static_cast<uint32_t>( setLayouts_.size() ) ), pSetLayouts( setLayouts_.data() ), pushConstantRangeCount( static_cast<uint32_t>( pushConstantRanges_.size() ) ), pPushConstantRanges( pushConstantRanges_.data() )
  {}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR_14 PipelineLayoutCreateInfo & operator=( PipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  PipelineLayoutCreateInfo & operator=( VkPipelineLayoutCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  {
    *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo const *>( &rhs );
    return *this;
  }
  PipelineLayoutCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  {
    pNext = pNext_;
    return *this;
  }
  PipelineLayoutCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  {
    flags = flags_;
    return *this;
  }
  PipelineLayoutCreateInfo & setSetLayoutCount( uint32_t setLayoutCount_ ) VULKAN_HPP_NOEXCEPT
  {
    setLayoutCount = setLayoutCount_;
    return *this;
  }
  PipelineLayoutCreateInfo & setPSetLayouts( const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts_ ) VULKAN_HPP_NOEXCEPT
  {
    pSetLayouts = pSetLayouts_;
    return *this;
  }
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  PipelineLayoutCreateInfo & setSetLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorSetLayout> const & setLayouts_ ) VULKAN_HPP_NOEXCEPT
  {
    setLayoutCount = static_cast<uint32_t>( setLayouts_.size() );
    pSetLayouts = setLayouts_.data();
    return *this;
  }
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  PipelineLayoutCreateInfo & setPushConstantRangeCount( uint32_t pushConstantRangeCount_ ) VULKAN_HPP_NOEXCEPT
  {
    pushConstantRangeCount = pushConstantRangeCount_;
    return *this;
  }
  PipelineLayoutCreateInfo & setPPushConstantRanges( const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
  {
    pPushConstantRanges = pPushConstantRanges_;
    return *this;
  }
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  PipelineLayoutCreateInfo & setPushConstantRanges( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PushConstantRange> const & pushConstantRanges_ ) VULKAN_HPP_NOEXCEPT
  {
    pushConstantRangeCount = static_cast<uint32_t>( pushConstantRanges_.size() );
    pPushConstantRanges = pushConstantRanges_.data();
    return *this;
  }
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  operator VkPipelineLayoutCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<const VkPipelineLayoutCreateInfo*>( this );
  }
  operator VkPipelineLayoutCreateInfo &() VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<VkPipelineLayoutCreateInfo*>( this );
  }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  auto operator<=>( PipelineLayoutCreateInfo const& ) const = default;
#else
  bool operator==( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return ( sType == rhs.sType )
        && ( pNext == rhs.pNext )
        && ( flags == rhs.flags )
        && ( setLayoutCount == rhs.setLayoutCount )
        && ( pSetLayouts == rhs.pSetLayouts )
        && ( pushConstantRangeCount == rhs.pushConstantRangeCount )
        && ( pPushConstantRanges == rhs.pPushConstantRanges );
  }
  bool operator!=( PipelineLayoutCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return !operator==( rhs );
  }
#endif
public:
  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLayoutCreateInfo;
  const void* pNext = {};
  VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags flags = {};
  uint32_t setLayoutCount = {};
  const VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayouts = {};
  uint32_t pushConstantRangeCount = {};
  const VULKAN_HPP_NAMESPACE::PushConstantRange* pPushConstantRanges = {};
};
static_assert( sizeof( PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PipelineLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePipelineLayoutCreateInfo>
{
  using Type = PipelineLayoutCreateInfo;
};
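// Illustrative usage sketch (not part of the generated header), combining the
// PushConstantRange defined above with a descriptor set layout created elsewhere;
// `descriptorSetLayout` and the 16-float range are hypothetical placeholders.
//
//   vk::PushConstantRange pushRange( vk::ShaderStageFlagBits::eVertex, 0, 16 * sizeof( float ) );
//   std::array<vk::DescriptorSetLayout, 1> setLayouts = { descriptorSetLayout };
//   vk::PipelineLayoutCreateInfo layoutInfo = vk::PipelineLayoutCreateInfo{}
//                                               .setSetLayouts( setLayouts )
//                                               .setPushConstantRanges( pushRange );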
struct PrivateDataSlotCreateInfoEXT
{
  static const bool allowDuplicate = false;
  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePrivateDataSlotCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfoEXT(VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags_ = {}) VULKAN_HPP_NOEXCEPT
    : flags( flags_ )
  {}
  VULKAN_HPP_CONSTEXPR PrivateDataSlotCreateInfoEXT( PrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  PrivateDataSlotCreateInfoEXT( VkPrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    : PrivateDataSlotCreateInfoEXT( *reinterpret_cast<PrivateDataSlotCreateInfoEXT const *>( &rhs ) )
  {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR_14 PrivateDataSlotCreateInfoEXT & operator=( PrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  PrivateDataSlotCreateInfoEXT & operator=( VkPrivateDataSlotCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  {
    *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT const *>( &rhs );
    return *this;
  }
  PrivateDataSlotCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  {
    pNext = pNext_;
    return *this;
  }
  PrivateDataSlotCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
  {
    flags = flags_;
    return *this;
  }
  operator VkPrivateDataSlotCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT*>( this );
  }
  operator VkPrivateDataSlotCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<VkPrivateDataSlotCreateInfoEXT*>( this );
  }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  auto operator<=>( PrivateDataSlotCreateInfoEXT const& ) const = default;
#else
  bool operator==( PrivateDataSlotCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return ( sType == rhs.sType )
        && ( pNext == rhs.pNext )
        && ( flags == rhs.flags );
  }
  bool operator!=( PrivateDataSlotCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return !operator==( rhs );
  }
#endif
public:
  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePrivateDataSlotCreateInfoEXT;
  const void* pNext = {};
  VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagsEXT flags = {};
};
static_assert( sizeof( PrivateDataSlotCreateInfoEXT ) == sizeof( VkPrivateDataSlotCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PrivateDataSlotCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePrivateDataSlotCreateInfoEXT>
{
  using Type = PrivateDataSlotCreateInfoEXT;
};
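// Illustrative usage sketch (not part of the generated header): flags is the only
// configurable member, so a default-constructed struct is typically sufficient when
// the VK_EXT_private_data extension is enabled on the device.
//
//   vk::PrivateDataSlotCreateInfoEXT slotInfo;   // sType is preset, flags left empty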
class PrivateDataSlotEXT
{
public:
  using CType = VkPrivateDataSlotEXT;
  static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlotEXT;
  static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
public:
  VULKAN_HPP_CONSTEXPR PrivateDataSlotEXT() VULKAN_HPP_NOEXCEPT
    : m_privateDataSlotEXT(VK_NULL_HANDLE)
  {}
  VULKAN_HPP_CONSTEXPR PrivateDataSlotEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
    : m_privateDataSlotEXT(VK_NULL_HANDLE)
  {}
  VULKAN_HPP_TYPESAFE_EXPLICIT PrivateDataSlotEXT( VkPrivateDataSlotEXT privateDataSlotEXT ) VULKAN_HPP_NOEXCEPT
    : m_privateDataSlotEXT( privateDataSlotEXT )
  {}
#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  PrivateDataSlotEXT & operator=(VkPrivateDataSlotEXT privateDataSlotEXT) VULKAN_HPP_NOEXCEPT
  {
    m_privateDataSlotEXT = privateDataSlotEXT;
    return *this;
  }
#endif
  PrivateDataSlotEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  {
    m_privateDataSlotEXT = VK_NULL_HANDLE;
    return *this;
  }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  auto operator<=>( PrivateDataSlotEXT const& ) const = default;
#else
  bool operator==( PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return m_privateDataSlotEXT == rhs.m_privateDataSlotEXT;
  }
  bool operator!=(PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return m_privateDataSlotEXT != rhs.m_privateDataSlotEXT;
  }
  bool operator<(PrivateDataSlotEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return m_privateDataSlotEXT < rhs.m_privateDataSlotEXT;
  }
#endif
  VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPrivateDataSlotEXT() const VULKAN_HPP_NOEXCEPT
  {
    return m_privateDataSlotEXT;
  }
  explicit operator bool() const VULKAN_HPP_NOEXCEPT
  {
    return m_privateDataSlotEXT != VK_NULL_HANDLE;
  }
  bool operator!() const VULKAN_HPP_NOEXCEPT
  {
    return m_privateDataSlotEXT == VK_NULL_HANDLE;
  }
private:
  VkPrivateDataSlotEXT m_privateDataSlotEXT;
};
static_assert( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT ) == sizeof( VkPrivateDataSlotEXT ), "handle and wrapper have different size!" );
template <>
struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePrivateDataSlotEXT>
{
  using type = VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT;
};
template <>
struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePrivateDataSlotEXT>
{
  using Type = VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT;
};
template <>
struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>
{
  static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
};
struct QueryPoolCreateInfo
{
  static const bool allowDuplicate = false;
  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo(VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::QueryType queryType_ = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion, uint32_t queryCount_ = {}, VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ = {}) VULKAN_HPP_NOEXCEPT
    : flags( flags_ ), queryType( queryType_ ), queryCount( queryCount_ ), pipelineStatistics( pipelineStatistics_ )
  {}
  VULKAN_HPP_CONSTEXPR QueryPoolCreateInfo( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  QueryPoolCreateInfo( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    : QueryPoolCreateInfo( *reinterpret_cast<QueryPoolCreateInfo const *>( &rhs ) )
  {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR_14 QueryPoolCreateInfo & operator=( QueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  QueryPoolCreateInfo & operator=( VkQueryPoolCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  {
    *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo const *>( &rhs );
    return *this;
  }
  QueryPoolCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  {
    pNext = pNext_;
    return *this;
  }
  QueryPoolCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  {
    flags = flags_;
    return *this;
  }
  QueryPoolCreateInfo & setQueryType( VULKAN_HPP_NAMESPACE::QueryType queryType_ ) VULKAN_HPP_NOEXCEPT
  {
    queryType = queryType_;
    return *this;
  }
  QueryPoolCreateInfo & setQueryCount( uint32_t queryCount_ ) VULKAN_HPP_NOEXCEPT
  {
    queryCount = queryCount_;
    return *this;
  }
  QueryPoolCreateInfo & setPipelineStatistics( VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics_ ) VULKAN_HPP_NOEXCEPT
  {
    pipelineStatistics = pipelineStatistics_;
    return *this;
  }
  operator VkQueryPoolCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<const VkQueryPoolCreateInfo*>( this );
  }
  operator VkQueryPoolCreateInfo &() VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<VkQueryPoolCreateInfo*>( this );
  }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  auto operator<=>( QueryPoolCreateInfo const& ) const = default;
#else
  bool operator==( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return ( sType == rhs.sType )
        && ( pNext == rhs.pNext )
        && ( flags == rhs.flags )
        && ( queryType == rhs.queryType )
        && ( queryCount == rhs.queryCount )
        && ( pipelineStatistics == rhs.pipelineStatistics );
  }
  bool operator!=( QueryPoolCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return !operator==( rhs );
  }
#endif
public:
  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolCreateInfo;
  const void* pNext = {};
  VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags flags = {};
  VULKAN_HPP_NAMESPACE::QueryType queryType = VULKAN_HPP_NAMESPACE::QueryType::eOcclusion;
  uint32_t queryCount = {};
  VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags pipelineStatistics = {};
};
static_assert( sizeof( QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<QueryPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eQueryPoolCreateInfo>
{
  using Type = QueryPoolCreateInfo;
};
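// Illustrative usage sketch (not part of the generated header): a timestamp query
// pool with 64 entries (a hypothetical count); pipelineStatistics is only consulted
// when queryType is vk::QueryType::ePipelineStatistics.
//
//   vk::QueryPoolCreateInfo queryPoolInfo = vk::QueryPoolCreateInfo{}
//                                             .setQueryType( vk::QueryType::eTimestamp )
//                                             .setQueryCount( 64 );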
struct RayTracingShaderGroupCreateInfoKHR
{
  static const bool allowDuplicate = false;
  static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, uint32_t generalShader_ = {}, uint32_t closestHitShader_ = {}, uint32_t anyHitShader_ = {}, uint32_t intersectionShader_ = {}, const void* pShaderGroupCaptureReplayHandle_ = {}) VULKAN_HPP_NOEXCEPT
    : type( type_ ), generalShader( generalShader_ ), closestHitShader( closestHitShader_ ), anyHitShader( anyHitShader_ ), intersectionShader( intersectionShader_ ), pShaderGroupCaptureReplayHandle( pShaderGroupCaptureReplayHandle_ )
  {}
  VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoKHR( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  RayTracingShaderGroupCreateInfoKHR( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    : RayTracingShaderGroupCreateInfoKHR( *reinterpret_cast<RayTracingShaderGroupCreateInfoKHR const *>( &rhs ) )
  {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoKHR & operator=( RayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  RayTracingShaderGroupCreateInfoKHR & operator=( VkRayTracingShaderGroupCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  {
    *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR const *>( &rhs );
    return *this;
  }
  RayTracingShaderGroupCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  {
    pNext = pNext_;
    return *this;
  }
  RayTracingShaderGroupCreateInfoKHR & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
  {
    type = type_;
    return *this;
  }
  RayTracingShaderGroupCreateInfoKHR & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
  {
    generalShader = generalShader_;
    return *this;
  }
  RayTracingShaderGroupCreateInfoKHR & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
  {
    closestHitShader = closestHitShader_;
    return *this;
  }
  RayTracingShaderGroupCreateInfoKHR & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
  {
    anyHitShader = anyHitShader_;
    return *this;
  }
  RayTracingShaderGroupCreateInfoKHR & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
  {
    intersectionShader = intersectionShader_;
    return *this;
  }
  RayTracingShaderGroupCreateInfoKHR & setPShaderGroupCaptureReplayHandle( const void* pShaderGroupCaptureReplayHandle_ ) VULKAN_HPP_NOEXCEPT
  {
    pShaderGroupCaptureReplayHandle = pShaderGroupCaptureReplayHandle_;
    return *this;
  }
  operator VkRayTracingShaderGroupCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoKHR*>( this );
  }
  operator VkRayTracingShaderGroupCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
  {
    return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoKHR*>( this );
  }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  auto operator<=>( RayTracingShaderGroupCreateInfoKHR const& ) const = default;
#else
  bool operator==( RayTracingShaderGroupCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return ( sType == rhs.sType )
        && ( pNext == rhs.pNext )
        && ( type == rhs.type )
        && ( generalShader == rhs.generalShader )
        && ( closestHitShader == rhs.closestHitShader )
        && ( anyHitShader == rhs.anyHitShader )
        && ( intersectionShader == rhs.intersectionShader )
        && ( pShaderGroupCaptureReplayHandle == rhs.pShaderGroupCaptureReplayHandle );
  }
  bool operator!=( RayTracingShaderGroupCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  {
    return !operator==( rhs );
  }
#endif
public:
  VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoKHR;
  const void* pNext = {};
  VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
  uint32_t generalShader = {};
  uint32_t closestHitShader = {};
  uint32_t anyHitShader = {};
  uint32_t intersectionShader = {};
  const void* pShaderGroupCaptureReplayHandle = {};
};
static_assert( sizeof( RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<RayTracingShaderGroupCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eRayTracingShaderGroupCreateInfoKHR>
{
  using Type = RayTracingShaderGroupCreateInfoKHR;
};
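// Illustrative usage sketch (not part of the generated header): a general (raygen
// or miss) group references a single stage index; the remaining slots are assumed
// to take the VK_SHADER_UNUSED_KHR sentinel from the ray tracing C headers.
//
//   vk::RayTracingShaderGroupCreateInfoKHR raygenGroup = vk::RayTracingShaderGroupCreateInfoKHR{}
//                                                          .setType( vk::RayTracingShaderGroupTypeKHR::eGeneral )
//                                                          .setGeneralShader( 0 )   // index into the pipeline's stage array
//                                                          .setClosestHitShader( VK_SHADER_UNUSED_KHR )
//                                                          .setAnyHitShader( VK_SHADER_UNUSED_KHR )
//                                                          .setIntersectionShader( VK_SHADER_UNUSED_KHR );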
  31867. struct PipelineLibraryCreateInfoKHR
  31868. {
  31869. static const bool allowDuplicate = false;
  31870. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineLibraryCreateInfoKHR;
  31871. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  31872. VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR(uint32_t libraryCount_ = {}, const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries_ = {}) VULKAN_HPP_NOEXCEPT
  31873. : libraryCount( libraryCount_ ), pLibraries( pLibraries_ )
  31874. {}
  31875. VULKAN_HPP_CONSTEXPR PipelineLibraryCreateInfoKHR( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  31876. PipelineLibraryCreateInfoKHR( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  31877. : PipelineLibraryCreateInfoKHR( *reinterpret_cast<PipelineLibraryCreateInfoKHR const *>( &rhs ) )
  31878. {}
  31879. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  31880. PipelineLibraryCreateInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_ )
  31881. : libraryCount( static_cast<uint32_t>( libraries_.size() ) ), pLibraries( libraries_.data() )
  31882. {}
  31883. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  31884. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  31885. VULKAN_HPP_CONSTEXPR_14 PipelineLibraryCreateInfoKHR & operator=( PipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  31886. PipelineLibraryCreateInfoKHR & operator=( VkPipelineLibraryCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  31887. {
  31888. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR const *>( &rhs );
  31889. return *this;
  31890. }
  31891. PipelineLibraryCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  31892. {
  31893. pNext = pNext_;
  31894. return *this;
  31895. }
  31896. PipelineLibraryCreateInfoKHR & setLibraryCount( uint32_t libraryCount_ ) VULKAN_HPP_NOEXCEPT
  31897. {
  31898. libraryCount = libraryCount_;
  31899. return *this;
  31900. }
  31901. PipelineLibraryCreateInfoKHR & setPLibraries( const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries_ ) VULKAN_HPP_NOEXCEPT
  31902. {
  31903. pLibraries = pLibraries_;
  31904. return *this;
  31905. }
  31906. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  31907. PipelineLibraryCreateInfoKHR & setLibraries( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & libraries_ ) VULKAN_HPP_NOEXCEPT
  31908. {
  31909. libraryCount = static_cast<uint32_t>( libraries_.size() );
  31910. pLibraries = libraries_.data();
  31911. return *this;
  31912. }
  31913. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  31914. operator VkPipelineLibraryCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  31915. {
  31916. return *reinterpret_cast<const VkPipelineLibraryCreateInfoKHR*>( this );
  31917. }
  31918. operator VkPipelineLibraryCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
  31919. {
  31920. return *reinterpret_cast<VkPipelineLibraryCreateInfoKHR*>( this );
  31921. }
  31922. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  31923. auto operator<=>( PipelineLibraryCreateInfoKHR const& ) const = default;
  31924. #else
  31925. bool operator==( PipelineLibraryCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  31926. {
  31927. return ( sType == rhs.sType )
  31928. && ( pNext == rhs.pNext )
  31929. && ( libraryCount == rhs.libraryCount )
  31930. && ( pLibraries == rhs.pLibraries );
  31931. }
  31932. bool operator!=( PipelineLibraryCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  31933. {
  31934. return !operator==( rhs );
  31935. }
  31936. #endif
  31937. public:
  31938. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineLibraryCreateInfoKHR;
  31939. const void* pNext = {};
  31940. uint32_t libraryCount = {};
  31941. const VULKAN_HPP_NAMESPACE::Pipeline* pLibraries = {};
  31942. };
  31943. static_assert( sizeof( PipelineLibraryCreateInfoKHR ) == sizeof( VkPipelineLibraryCreateInfoKHR ), "struct and wrapper have different size!" );
  31944. static_assert( std::is_standard_layout<PipelineLibraryCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  31945. template <>
  31946. struct CppType<StructureType, StructureType::ePipelineLibraryCreateInfoKHR>
  31947. {
  31948. using Type = PipelineLibraryCreateInfoKHR;
  31949. };
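// Usage sketch (illustrative, not emitted by the generator): pointing a PipelineLibraryCreateInfoKHR at
// already-created pipeline-library handles. The caller-owned `libraries` array is an assumption; only a
// pointer and count are stored, so the array must outlive pipeline creation.
inline PipelineLibraryCreateInfoKHR examplePipelineLibraryInfoKHR( const VULKAN_HPP_NAMESPACE::Pipeline* libraries, uint32_t count ) VULKAN_HPP_NOEXCEPT
{
  return PipelineLibraryCreateInfoKHR{}
    .setLibraryCount( count )
    .setPLibraries( libraries );
}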
  31950. struct RayTracingPipelineInterfaceCreateInfoKHR
  31951. {
  31952. static const bool allowDuplicate = false;
  31953. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;
  31954. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  31955. VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR(uint32_t maxPipelineRayPayloadSize_ = {}, uint32_t maxPipelineRayHitAttributeSize_ = {}) VULKAN_HPP_NOEXCEPT
  31956. : maxPipelineRayPayloadSize( maxPipelineRayPayloadSize_ ), maxPipelineRayHitAttributeSize( maxPipelineRayHitAttributeSize_ )
  31957. {}
  31958. VULKAN_HPP_CONSTEXPR RayTracingPipelineInterfaceCreateInfoKHR( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  31959. RayTracingPipelineInterfaceCreateInfoKHR( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  31960. : RayTracingPipelineInterfaceCreateInfoKHR( *reinterpret_cast<RayTracingPipelineInterfaceCreateInfoKHR const *>( &rhs ) )
  31961. {}
  31962. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  31963. VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineInterfaceCreateInfoKHR & operator=( RayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  31964. RayTracingPipelineInterfaceCreateInfoKHR & operator=( VkRayTracingPipelineInterfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  31965. {
  31966. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR const *>( &rhs );
  31967. return *this;
  31968. }
  31969. RayTracingPipelineInterfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  31970. {
  31971. pNext = pNext_;
  31972. return *this;
  31973. }
  31974. RayTracingPipelineInterfaceCreateInfoKHR & setMaxPipelineRayPayloadSize( uint32_t maxPipelineRayPayloadSize_ ) VULKAN_HPP_NOEXCEPT
  31975. {
  31976. maxPipelineRayPayloadSize = maxPipelineRayPayloadSize_;
  31977. return *this;
  31978. }
  31979. RayTracingPipelineInterfaceCreateInfoKHR & setMaxPipelineRayHitAttributeSize( uint32_t maxPipelineRayHitAttributeSize_ ) VULKAN_HPP_NOEXCEPT
  31980. {
  31981. maxPipelineRayHitAttributeSize = maxPipelineRayHitAttributeSize_;
  31982. return *this;
  31983. }
  31984. operator VkRayTracingPipelineInterfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  31985. {
  31986. return *reinterpret_cast<const VkRayTracingPipelineInterfaceCreateInfoKHR*>( this );
  31987. }
  31988. operator VkRayTracingPipelineInterfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
  31989. {
  31990. return *reinterpret_cast<VkRayTracingPipelineInterfaceCreateInfoKHR*>( this );
  31991. }
  31992. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  31993. auto operator<=>( RayTracingPipelineInterfaceCreateInfoKHR const& ) const = default;
  31994. #else
  31995. bool operator==( RayTracingPipelineInterfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  31996. {
  31997. return ( sType == rhs.sType )
  31998. && ( pNext == rhs.pNext )
  31999. && ( maxPipelineRayPayloadSize == rhs.maxPipelineRayPayloadSize )
  32000. && ( maxPipelineRayHitAttributeSize == rhs.maxPipelineRayHitAttributeSize );
  32001. }
  32002. bool operator!=( RayTracingPipelineInterfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  32003. {
  32004. return !operator==( rhs );
  32005. }
  32006. #endif
  32007. public:
  32008. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineInterfaceCreateInfoKHR;
  32009. const void* pNext = {};
  32010. uint32_t maxPipelineRayPayloadSize = {};
  32011. uint32_t maxPipelineRayHitAttributeSize = {};
  32012. };
  32013. static_assert( sizeof( RayTracingPipelineInterfaceCreateInfoKHR ) == sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ), "struct and wrapper have different size!" );
  32014. static_assert( std::is_standard_layout<RayTracingPipelineInterfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  32015. template <>
  32016. struct CppType<StructureType, StructureType::eRayTracingPipelineInterfaceCreateInfoKHR>
  32017. {
  32018. using Type = RayTracingPipelineInterfaceCreateInfoKHR;
  32019. };
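// Usage sketch (illustrative, not emitted by the generator): this interface struct only carries the two
// sizes that pipeline libraries and the pipeline linking them must agree on. The values are placeholders:
// 32 bytes of ray payload (eight floats) and 8 bytes of hit attributes (the two barycentrics reported by
// the built-in triangle intersection).
inline RayTracingPipelineInterfaceCreateInfoKHR exampleRayTracingInterfaceKHR() VULKAN_HPP_NOEXCEPT
{
  return RayTracingPipelineInterfaceCreateInfoKHR{}
    .setMaxPipelineRayPayloadSize( 32 )
    .setMaxPipelineRayHitAttributeSize( 8 );
}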
  32020. struct RayTracingPipelineCreateInfoKHR
  32021. {
  32022. static const bool allowDuplicate = false;
  32023. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoKHR;
  32024. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  32025. VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups_ = {}, uint32_t maxPipelineRayRecursionDepth_ = {}, const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR* pLibraryInfo_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
  32026. : flags( flags_ ), stageCount( stageCount_ ), pStages( pStages_ ), groupCount( groupCount_ ), pGroups( pGroups_ ), maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ), pLibraryInfo( pLibraryInfo_ ), pLibraryInterface( pLibraryInterface_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
  32027. {}
  32028. VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoKHR( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  32029. RayTracingPipelineCreateInfoKHR( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  32030. : RayTracingPipelineCreateInfoKHR( *reinterpret_cast<RayTracingPipelineCreateInfoKHR const *>( &rhs ) )
  32031. {}
  32032. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32033. RayTracingPipelineCreateInfoKHR( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ = {}, uint32_t maxPipelineRayRecursionDepth_ = {}, const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR* pLibraryInfo_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface_ = {}, const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {} )
  32034. : flags( flags_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), groupCount( static_cast<uint32_t>( groups_.size() ) ), pGroups( groups_.data() ), maxPipelineRayRecursionDepth( maxPipelineRayRecursionDepth_ ), pLibraryInfo( pLibraryInfo_ ), pLibraryInterface( pLibraryInterface_ ), pDynamicState( pDynamicState_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
  32035. {}
  32036. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32037. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  32038. VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoKHR & operator=( RayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  32039. RayTracingPipelineCreateInfoKHR & operator=( VkRayTracingPipelineCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  32040. {
  32041. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR const *>( &rhs );
  32042. return *this;
  32043. }
  32044. RayTracingPipelineCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  32045. {
  32046. pNext = pNext_;
  32047. return *this;
  32048. }
  32049. RayTracingPipelineCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  32050. {
  32051. flags = flags_;
  32052. return *this;
  32053. }
  32054. RayTracingPipelineCreateInfoKHR & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
  32055. {
  32056. stageCount = stageCount_;
  32057. return *this;
  32058. }
  32059. RayTracingPipelineCreateInfoKHR & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
  32060. {
  32061. pStages = pStages_;
  32062. return *this;
  32063. }
  32064. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32065. RayTracingPipelineCreateInfoKHR & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
  32066. {
  32067. stageCount = static_cast<uint32_t>( stages_.size() );
  32068. pStages = stages_.data();
  32069. return *this;
  32070. }
  32071. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32072. RayTracingPipelineCreateInfoKHR & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
  32073. {
  32074. groupCount = groupCount_;
  32075. return *this;
  32076. }
  32077. RayTracingPipelineCreateInfoKHR & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups_ ) VULKAN_HPP_NOEXCEPT
  32078. {
  32079. pGroups = pGroups_;
  32080. return *this;
  32081. }
  32082. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32083. RayTracingPipelineCreateInfoKHR & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR> const & groups_ ) VULKAN_HPP_NOEXCEPT
  32084. {
  32085. groupCount = static_cast<uint32_t>( groups_.size() );
  32086. pGroups = groups_.data();
  32087. return *this;
  32088. }
  32089. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32090. RayTracingPipelineCreateInfoKHR & setMaxPipelineRayRecursionDepth( uint32_t maxPipelineRayRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
  32091. {
  32092. maxPipelineRayRecursionDepth = maxPipelineRayRecursionDepth_;
  32093. return *this;
  32094. }
  32095. RayTracingPipelineCreateInfoKHR & setPLibraryInfo( const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR* pLibraryInfo_ ) VULKAN_HPP_NOEXCEPT
  32096. {
  32097. pLibraryInfo = pLibraryInfo_;
  32098. return *this;
  32099. }
  32100. RayTracingPipelineCreateInfoKHR & setPLibraryInterface( const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface_ ) VULKAN_HPP_NOEXCEPT
  32101. {
  32102. pLibraryInterface = pLibraryInterface_;
  32103. return *this;
  32104. }
  32105. RayTracingPipelineCreateInfoKHR & setPDynamicState( const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState_ ) VULKAN_HPP_NOEXCEPT
  32106. {
  32107. pDynamicState = pDynamicState_;
  32108. return *this;
  32109. }
  32110. RayTracingPipelineCreateInfoKHR & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
  32111. {
  32112. layout = layout_;
  32113. return *this;
  32114. }
  32115. RayTracingPipelineCreateInfoKHR & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
  32116. {
  32117. basePipelineHandle = basePipelineHandle_;
  32118. return *this;
  32119. }
  32120. RayTracingPipelineCreateInfoKHR & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
  32121. {
  32122. basePipelineIndex = basePipelineIndex_;
  32123. return *this;
  32124. }
  32125. operator VkRayTracingPipelineCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  32126. {
  32127. return *reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR*>( this );
  32128. }
  32129. operator VkRayTracingPipelineCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
  32130. {
  32131. return *reinterpret_cast<VkRayTracingPipelineCreateInfoKHR*>( this );
  32132. }
  32133. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  32134. auto operator<=>( RayTracingPipelineCreateInfoKHR const& ) const = default;
  32135. #else
  32136. bool operator==( RayTracingPipelineCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  32137. {
  32138. return ( sType == rhs.sType )
  32139. && ( pNext == rhs.pNext )
  32140. && ( flags == rhs.flags )
  32141. && ( stageCount == rhs.stageCount )
  32142. && ( pStages == rhs.pStages )
  32143. && ( groupCount == rhs.groupCount )
  32144. && ( pGroups == rhs.pGroups )
  32145. && ( maxPipelineRayRecursionDepth == rhs.maxPipelineRayRecursionDepth )
  32146. && ( pLibraryInfo == rhs.pLibraryInfo )
  32147. && ( pLibraryInterface == rhs.pLibraryInterface )
  32148. && ( pDynamicState == rhs.pDynamicState )
  32149. && ( layout == rhs.layout )
  32150. && ( basePipelineHandle == rhs.basePipelineHandle )
  32151. && ( basePipelineIndex == rhs.basePipelineIndex );
  32152. }
  32153. bool operator!=( RayTracingPipelineCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  32154. {
  32155. return !operator==( rhs );
  32156. }
  32157. #endif
  32158. public:
  32159. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoKHR;
  32160. const void* pNext = {};
  32161. VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
  32162. uint32_t stageCount = {};
  32163. const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {};
  32164. uint32_t groupCount = {};
  32165. const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* pGroups = {};
  32166. uint32_t maxPipelineRayRecursionDepth = {};
  32167. const VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR* pLibraryInfo = {};
  32168. const VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface = {};
  32169. const VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo* pDynamicState = {};
  32170. VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
  32171. VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
  32172. int32_t basePipelineIndex = {};
  32173. };
  32174. static_assert( sizeof( RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ), "struct and wrapper have different size!" );
  32175. static_assert( std::is_standard_layout<RayTracingPipelineCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  32176. template <>
  32177. struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoKHR>
  32178. {
  32179. using Type = RayTracingPipelineCreateInfoKHR;
  32180. };
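// Usage sketch (illustrative, not emitted by the generator): assembling a RayTracingPipelineCreateInfoKHR
// from caller-provided stage and group arrays before a call such as Device::createRayTracingPipelineKHR.
// All parameters here stand in for the caller's data; a recursion depth of 1 suits pipelines that only
// trace rays from the raygen shader.
inline RayTracingPipelineCreateInfoKHR exampleRayTracingPipelineInfoKHR( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* stages, uint32_t stageCount_, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR* groups, uint32_t groupCount_, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
{
  return RayTracingPipelineCreateInfoKHR{}
    .setStageCount( stageCount_ )
    .setPStages( stages )
    .setGroupCount( groupCount_ )
    .setPGroups( groups )
    .setMaxPipelineRayRecursionDepth( 1 )
    .setLayout( layout_ );
}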
  32181. struct RayTracingShaderGroupCreateInfoNV
  32182. {
  32183. static const bool allowDuplicate = false;
  32184. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingShaderGroupCreateInfoNV;
  32185. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  32186. VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV(VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral, uint32_t generalShader_ = {}, uint32_t closestHitShader_ = {}, uint32_t anyHitShader_ = {}, uint32_t intersectionShader_ = {}) VULKAN_HPP_NOEXCEPT
  32187. : type( type_ ), generalShader( generalShader_ ), closestHitShader( closestHitShader_ ), anyHitShader( anyHitShader_ ), intersectionShader( intersectionShader_ )
  32188. {}
  32189. VULKAN_HPP_CONSTEXPR RayTracingShaderGroupCreateInfoNV( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  32190. RayTracingShaderGroupCreateInfoNV( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  32191. : RayTracingShaderGroupCreateInfoNV( *reinterpret_cast<RayTracingShaderGroupCreateInfoNV const *>( &rhs ) )
  32192. {}
  32193. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  32194. VULKAN_HPP_CONSTEXPR_14 RayTracingShaderGroupCreateInfoNV & operator=( RayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  32195. RayTracingShaderGroupCreateInfoNV & operator=( VkRayTracingShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  32196. {
  32197. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV const *>( &rhs );
  32198. return *this;
  32199. }
  32200. RayTracingShaderGroupCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  32201. {
  32202. pNext = pNext_;
  32203. return *this;
  32204. }
  32205. RayTracingShaderGroupCreateInfoNV & setType( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type_ ) VULKAN_HPP_NOEXCEPT
  32206. {
  32207. type = type_;
  32208. return *this;
  32209. }
  32210. RayTracingShaderGroupCreateInfoNV & setGeneralShader( uint32_t generalShader_ ) VULKAN_HPP_NOEXCEPT
  32211. {
  32212. generalShader = generalShader_;
  32213. return *this;
  32214. }
  32215. RayTracingShaderGroupCreateInfoNV & setClosestHitShader( uint32_t closestHitShader_ ) VULKAN_HPP_NOEXCEPT
  32216. {
  32217. closestHitShader = closestHitShader_;
  32218. return *this;
  32219. }
  32220. RayTracingShaderGroupCreateInfoNV & setAnyHitShader( uint32_t anyHitShader_ ) VULKAN_HPP_NOEXCEPT
  32221. {
  32222. anyHitShader = anyHitShader_;
  32223. return *this;
  32224. }
  32225. RayTracingShaderGroupCreateInfoNV & setIntersectionShader( uint32_t intersectionShader_ ) VULKAN_HPP_NOEXCEPT
  32226. {
  32227. intersectionShader = intersectionShader_;
  32228. return *this;
  32229. }
  32230. operator VkRayTracingShaderGroupCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
  32231. {
  32232. return *reinterpret_cast<const VkRayTracingShaderGroupCreateInfoNV*>( this );
  32233. }
  32234. operator VkRayTracingShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
  32235. {
  32236. return *reinterpret_cast<VkRayTracingShaderGroupCreateInfoNV*>( this );
  32237. }
  32238. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  32239. auto operator<=>( RayTracingShaderGroupCreateInfoNV const& ) const = default;
  32240. #else
  32241. bool operator==( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  32242. {
  32243. return ( sType == rhs.sType )
  32244. && ( pNext == rhs.pNext )
  32245. && ( type == rhs.type )
  32246. && ( generalShader == rhs.generalShader )
  32247. && ( closestHitShader == rhs.closestHitShader )
  32248. && ( anyHitShader == rhs.anyHitShader )
  32249. && ( intersectionShader == rhs.intersectionShader );
  32250. }
  32251. bool operator!=( RayTracingShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  32252. {
  32253. return !operator==( rhs );
  32254. }
  32255. #endif
  32256. public:
  32257. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingShaderGroupCreateInfoNV;
  32258. const void* pNext = {};
  32259. VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR type = VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR::eGeneral;
  32260. uint32_t generalShader = {};
  32261. uint32_t closestHitShader = {};
  32262. uint32_t anyHitShader = {};
  32263. uint32_t intersectionShader = {};
  32264. };
  32265. static_assert( sizeof( RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ), "struct and wrapper have different size!" );
  32266. static_assert( std::is_standard_layout<RayTracingShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  32267. template <>
  32268. struct CppType<StructureType, StructureType::eRayTracingShaderGroupCreateInfoNV>
  32269. {
  32270. using Type = RayTracingShaderGroupCreateInfoNV;
  32271. };
  32272. struct RayTracingPipelineCreateInfoNV
  32273. {
  32274. static const bool allowDuplicate = false;
  32275. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRayTracingPipelineCreateInfoNV;
  32276. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  32277. VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ = {}, uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {}) VULKAN_HPP_NOEXCEPT
  32278. : flags( flags_ ), stageCount( stageCount_ ), pStages( pStages_ ), groupCount( groupCount_ ), pGroups( pGroups_ ), maxRecursionDepth( maxRecursionDepth_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
  32279. {}
  32280. VULKAN_HPP_CONSTEXPR RayTracingPipelineCreateInfoNV( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  32281. RayTracingPipelineCreateInfoNV( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  32282. : RayTracingPipelineCreateInfoNV( *reinterpret_cast<RayTracingPipelineCreateInfoNV const *>( &rhs ) )
  32283. {}
  32284. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32285. RayTracingPipelineCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ = {}, uint32_t maxRecursionDepth_ = {}, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ = {}, VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ = {}, int32_t basePipelineIndex_ = {} )
  32286. : flags( flags_ ), stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), groupCount( static_cast<uint32_t>( groups_.size() ) ), pGroups( groups_.data() ), maxRecursionDepth( maxRecursionDepth_ ), layout( layout_ ), basePipelineHandle( basePipelineHandle_ ), basePipelineIndex( basePipelineIndex_ )
  32287. {}
  32288. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32289. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  32290. VULKAN_HPP_CONSTEXPR_14 RayTracingPipelineCreateInfoNV & operator=( RayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  32291. RayTracingPipelineCreateInfoNV & operator=( VkRayTracingPipelineCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  32292. {
  32293. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV const *>( &rhs );
  32294. return *this;
  32295. }
  32296. RayTracingPipelineCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  32297. {
  32298. pNext = pNext_;
  32299. return *this;
  32300. }
  32301. RayTracingPipelineCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  32302. {
  32303. flags = flags_;
  32304. return *this;
  32305. }
  32306. RayTracingPipelineCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
  32307. {
  32308. stageCount = stageCount_;
  32309. return *this;
  32310. }
  32311. RayTracingPipelineCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
  32312. {
  32313. pStages = pStages_;
  32314. return *this;
  32315. }
  32316. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32317. RayTracingPipelineCreateInfoNV & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
  32318. {
  32319. stageCount = static_cast<uint32_t>( stages_.size() );
  32320. pStages = stages_.data();
  32321. return *this;
  32322. }
  32323. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32324. RayTracingPipelineCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
  32325. {
  32326. groupCount = groupCount_;
  32327. return *this;
  32328. }
  32329. RayTracingPipelineCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups_ ) VULKAN_HPP_NOEXCEPT
  32330. {
  32331. pGroups = pGroups_;
  32332. return *this;
  32333. }
  32334. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32335. RayTracingPipelineCreateInfoNV & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT
  32336. {
  32337. groupCount = static_cast<uint32_t>( groups_.size() );
  32338. pGroups = groups_.data();
  32339. return *this;
  32340. }
  32341. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32342. RayTracingPipelineCreateInfoNV & setMaxRecursionDepth( uint32_t maxRecursionDepth_ ) VULKAN_HPP_NOEXCEPT
  32343. {
  32344. maxRecursionDepth = maxRecursionDepth_;
  32345. return *this;
  32346. }
  32347. RayTracingPipelineCreateInfoNV & setLayout( VULKAN_HPP_NAMESPACE::PipelineLayout layout_ ) VULKAN_HPP_NOEXCEPT
  32348. {
  32349. layout = layout_;
  32350. return *this;
  32351. }
  32352. RayTracingPipelineCreateInfoNV & setBasePipelineHandle( VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle_ ) VULKAN_HPP_NOEXCEPT
  32353. {
  32354. basePipelineHandle = basePipelineHandle_;
  32355. return *this;
  32356. }
  32357. RayTracingPipelineCreateInfoNV & setBasePipelineIndex( int32_t basePipelineIndex_ ) VULKAN_HPP_NOEXCEPT
  32358. {
  32359. basePipelineIndex = basePipelineIndex_;
  32360. return *this;
  32361. }
  32362. operator VkRayTracingPipelineCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
  32363. {
  32364. return *reinterpret_cast<const VkRayTracingPipelineCreateInfoNV*>( this );
  32365. }
  32366. operator VkRayTracingPipelineCreateInfoNV &() VULKAN_HPP_NOEXCEPT
  32367. {
  32368. return *reinterpret_cast<VkRayTracingPipelineCreateInfoNV*>( this );
  32369. }
  32370. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  32371. auto operator<=>( RayTracingPipelineCreateInfoNV const& ) const = default;
  32372. #else
  32373. bool operator==( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  32374. {
  32375. return ( sType == rhs.sType )
  32376. && ( pNext == rhs.pNext )
  32377. && ( flags == rhs.flags )
  32378. && ( stageCount == rhs.stageCount )
  32379. && ( pStages == rhs.pStages )
  32380. && ( groupCount == rhs.groupCount )
  32381. && ( pGroups == rhs.pGroups )
  32382. && ( maxRecursionDepth == rhs.maxRecursionDepth )
  32383. && ( layout == rhs.layout )
  32384. && ( basePipelineHandle == rhs.basePipelineHandle )
  32385. && ( basePipelineIndex == rhs.basePipelineIndex );
  32386. }
  32387. bool operator!=( RayTracingPipelineCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  32388. {
  32389. return !operator==( rhs );
  32390. }
  32391. #endif
  32392. public:
  32393. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRayTracingPipelineCreateInfoNV;
  32394. const void* pNext = {};
  32395. VULKAN_HPP_NAMESPACE::PipelineCreateFlags flags = {};
  32396. uint32_t stageCount = {};
  32397. const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {};
  32398. uint32_t groupCount = {};
  32399. const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV* pGroups = {};
  32400. uint32_t maxRecursionDepth = {};
  32401. VULKAN_HPP_NAMESPACE::PipelineLayout layout = {};
  32402. VULKAN_HPP_NAMESPACE::Pipeline basePipelineHandle = {};
  32403. int32_t basePipelineIndex = {};
  32404. };
  32405. static_assert( sizeof( RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ), "struct and wrapper have different size!" );
  32406. static_assert( std::is_standard_layout<RayTracingPipelineCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  32407. template <>
  32408. struct CppType<StructureType, StructureType::eRayTracingPipelineCreateInfoNV>
  32409. {
  32410. using Type = RayTracingPipelineCreateInfoNV;
  32411. };
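// Usage sketch (illustrative, not emitted by the generator): when both struct constructors and enhanced
// mode are available, the ArrayProxyNoTemporaries constructor fills the stage/group counts and pointers
// in one expression, e.g. directly from std::vector arguments. A maxRecursionDepth of 1 is a placeholder
// for pipelines that do not trace new rays from hit shaders.
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
inline RayTracingPipelineCreateInfoNV exampleRayTracingPipelineInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV> const & groups, VULKAN_HPP_NAMESPACE::PipelineLayout layout_ )
{
  return RayTracingPipelineCreateInfoNV( {}, stages, groups, 1, layout_ );
}
#endif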
  32412. struct SubpassDescription
  32413. {
  32414. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  32415. VULKAN_HPP_CONSTEXPR SubpassDescription(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t inputAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ = {}, uint32_t preserveAttachmentCount_ = {}, const uint32_t* pPreserveAttachments_ = {}) VULKAN_HPP_NOEXCEPT
  32416. : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), inputAttachmentCount( inputAttachmentCount_ ), pInputAttachments( pInputAttachments_ ), colorAttachmentCount( colorAttachmentCount_ ), pColorAttachments( pColorAttachments_ ), pResolveAttachments( pResolveAttachments_ ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( preserveAttachmentCount_ ), pPreserveAttachments( pPreserveAttachments_ )
  32417. {}
  32418. VULKAN_HPP_CONSTEXPR SubpassDescription( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  32419. SubpassDescription( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
  32420. : SubpassDescription( *reinterpret_cast<SubpassDescription const *>( &rhs ) )
  32421. {}
  32422. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32423. SubpassDescription( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & inputAttachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & colorAttachments_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & resolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ = {} )
  32424. : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) ), pInputAttachments( inputAttachments_.data() ), colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) ), pColorAttachments( colorAttachments_.data() ), pResolveAttachments( resolveAttachments_.data() ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( static_cast<uint32_t>( preserveAttachments_.size() ) ), pPreserveAttachments( preserveAttachments_.data() )
  32425. {
  32426. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  32427. VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) );
  32428. #else
  32429. if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) )
  32430. {
  32431. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubpassDescription::SubpassDescription: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" );
  32432. }
  32433. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  32434. }
  32435. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32436. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  32437. VULKAN_HPP_CONSTEXPR_14 SubpassDescription & operator=( SubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  32438. SubpassDescription & operator=( VkSubpassDescription const & rhs ) VULKAN_HPP_NOEXCEPT
  32439. {
  32440. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription const *>( &rhs );
  32441. return *this;
  32442. }
  32443. SubpassDescription & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
  32444. {
  32445. flags = flags_;
  32446. return *this;
  32447. }
  32448. SubpassDescription & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
  32449. {
  32450. pipelineBindPoint = pipelineBindPoint_;
  32451. return *this;
  32452. }
  32453. SubpassDescription & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
  32454. {
  32455. inputAttachmentCount = inputAttachmentCount_;
  32456. return *this;
  32457. }
  32458. SubpassDescription & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
  32459. {
  32460. pInputAttachments = pInputAttachments_;
  32461. return *this;
  32462. }
  32463. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32464. SubpassDescription & setInputAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT
  32465. {
  32466. inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
  32467. pInputAttachments = inputAttachments_.data();
  32468. return *this;
  32469. }
  32470. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32471. SubpassDescription & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
  32472. {
  32473. colorAttachmentCount = colorAttachmentCount_;
  32474. return *this;
  32475. }
  32476. SubpassDescription & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
  32477. {
  32478. pColorAttachments = pColorAttachments_;
  32479. return *this;
  32480. }
  32481. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32482. SubpassDescription & setColorAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
  32483. {
  32484. colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
  32485. pColorAttachments = colorAttachments_.data();
  32486. return *this;
  32487. }
  32488. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32489. SubpassDescription & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
  32490. {
  32491. pResolveAttachments = pResolveAttachments_;
  32492. return *this;
  32493. }
  32494. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32495. SubpassDescription & setResolveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference> const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
  32496. {
  32497. colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
  32498. pResolveAttachments = resolveAttachments_.data();
  32499. return *this;
  32500. }
  32501. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32502. SubpassDescription & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
  32503. {
  32504. pDepthStencilAttachment = pDepthStencilAttachment_;
  32505. return *this;
  32506. }
  32507. SubpassDescription & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
  32508. {
  32509. preserveAttachmentCount = preserveAttachmentCount_;
  32510. return *this;
  32511. }
  32512. SubpassDescription & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
  32513. {
  32514. pPreserveAttachments = pPreserveAttachments_;
  32515. return *this;
  32516. }
  32517. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32518. SubpassDescription & setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
  32519. {
  32520. preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
  32521. pPreserveAttachments = preserveAttachments_.data();
  32522. return *this;
  32523. }
  32524. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32525. operator VkSubpassDescription const&() const VULKAN_HPP_NOEXCEPT
  32526. {
  32527. return *reinterpret_cast<const VkSubpassDescription*>( this );
  32528. }
  32529. operator VkSubpassDescription &() VULKAN_HPP_NOEXCEPT
  32530. {
  32531. return *reinterpret_cast<VkSubpassDescription*>( this );
  32532. }
  32533. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  32534. auto operator<=>( SubpassDescription const& ) const = default;
  32535. #else
  32536. bool operator==( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
  32537. {
  32538. return ( flags == rhs.flags )
  32539. && ( pipelineBindPoint == rhs.pipelineBindPoint )
  32540. && ( inputAttachmentCount == rhs.inputAttachmentCount )
  32541. && ( pInputAttachments == rhs.pInputAttachments )
  32542. && ( colorAttachmentCount == rhs.colorAttachmentCount )
  32543. && ( pColorAttachments == rhs.pColorAttachments )
  32544. && ( pResolveAttachments == rhs.pResolveAttachments )
  32545. && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
  32546. && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
  32547. && ( pPreserveAttachments == rhs.pPreserveAttachments );
  32548. }
  32549. bool operator!=( SubpassDescription const& rhs ) const VULKAN_HPP_NOEXCEPT
  32550. {
  32551. return !operator==( rhs );
  32552. }
  32553. #endif
  32554. public:
  32555. VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
  32556. VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
  32557. uint32_t inputAttachmentCount = {};
  32558. const VULKAN_HPP_NAMESPACE::AttachmentReference* pInputAttachments = {};
  32559. uint32_t colorAttachmentCount = {};
  32560. const VULKAN_HPP_NAMESPACE::AttachmentReference* pColorAttachments = {};
  32561. const VULKAN_HPP_NAMESPACE::AttachmentReference* pResolveAttachments = {};
  32562. const VULKAN_HPP_NAMESPACE::AttachmentReference* pDepthStencilAttachment = {};
  32563. uint32_t preserveAttachmentCount = {};
  32564. const uint32_t* pPreserveAttachments = {};
  32565. };
  32566. static_assert( sizeof( SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" );
  32567. static_assert( std::is_standard_layout<SubpassDescription>::value, "struct wrapper is not a standard layout!" );
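// Usage sketch (illustrative, not emitted by the generator): a graphics subpass with one colour and one
// depth/stencil attachment. The AttachmentReference objects are assumed to be caller-owned and to outlive
// the description, since only raw pointers are stored.
inline SubpassDescription exampleColorDepthSubpass( const VULKAN_HPP_NAMESPACE::AttachmentReference* color, const VULKAN_HPP_NAMESPACE::AttachmentReference* depthStencil ) VULKAN_HPP_NOEXCEPT
{
  return SubpassDescription{}
    .setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics )
    .setColorAttachmentCount( 1 )
    .setPColorAttachments( color )
    .setPDepthStencilAttachment( depthStencil );
}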
  32568. struct SubpassDependency
  32569. {
  32570. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  32571. VULKAN_HPP_CONSTEXPR SubpassDependency(uint32_t srcSubpass_ = {}, uint32_t dstSubpass_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}) VULKAN_HPP_NOEXCEPT
  32572. : srcSubpass( srcSubpass_ ), dstSubpass( dstSubpass_ ), srcStageMask( srcStageMask_ ), dstStageMask( dstStageMask_ ), srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), dependencyFlags( dependencyFlags_ )
  32573. {}
  32574. VULKAN_HPP_CONSTEXPR SubpassDependency( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  32575. SubpassDependency( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
  32576. : SubpassDependency( *reinterpret_cast<SubpassDependency const *>( &rhs ) )
  32577. {}
  32578. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  32579. VULKAN_HPP_CONSTEXPR_14 SubpassDependency & operator=( SubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  32580. SubpassDependency & operator=( VkSubpassDependency const & rhs ) VULKAN_HPP_NOEXCEPT
  32581. {
  32582. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency const *>( &rhs );
  32583. return *this;
  32584. }
  32585. SubpassDependency & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
  32586. {
  32587. srcSubpass = srcSubpass_;
  32588. return *this;
  32589. }
  32590. SubpassDependency & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
  32591. {
  32592. dstSubpass = dstSubpass_;
  32593. return *this;
  32594. }
  32595. SubpassDependency & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
  32596. {
  32597. srcStageMask = srcStageMask_;
  32598. return *this;
  32599. }
  32600. SubpassDependency & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
  32601. {
  32602. dstStageMask = dstStageMask_;
  32603. return *this;
  32604. }
  32605. SubpassDependency & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
  32606. {
  32607. srcAccessMask = srcAccessMask_;
  32608. return *this;
  32609. }
  32610. SubpassDependency & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
  32611. {
  32612. dstAccessMask = dstAccessMask_;
  32613. return *this;
  32614. }
  32615. SubpassDependency & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
  32616. {
  32617. dependencyFlags = dependencyFlags_;
  32618. return *this;
  32619. }
  32620. operator VkSubpassDependency const&() const VULKAN_HPP_NOEXCEPT
  32621. {
  32622. return *reinterpret_cast<const VkSubpassDependency*>( this );
  32623. }
  32624. operator VkSubpassDependency &() VULKAN_HPP_NOEXCEPT
  32625. {
  32626. return *reinterpret_cast<VkSubpassDependency*>( this );
  32627. }
  32628. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  32629. auto operator<=>( SubpassDependency const& ) const = default;
  32630. #else
  32631. bool operator==( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT
  32632. {
  32633. return ( srcSubpass == rhs.srcSubpass )
  32634. && ( dstSubpass == rhs.dstSubpass )
  32635. && ( srcStageMask == rhs.srcStageMask )
  32636. && ( dstStageMask == rhs.dstStageMask )
  32637. && ( srcAccessMask == rhs.srcAccessMask )
  32638. && ( dstAccessMask == rhs.dstAccessMask )
  32639. && ( dependencyFlags == rhs.dependencyFlags );
  32640. }
  32641. bool operator!=( SubpassDependency const& rhs ) const VULKAN_HPP_NOEXCEPT
  32642. {
  32643. return !operator==( rhs );
  32644. }
  32645. #endif
  32646. public:
  32647. uint32_t srcSubpass = {};
  32648. uint32_t dstSubpass = {};
  32649. VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
  32650. VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
  32651. VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
  32652. VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
  32653. VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
  32654. };
  32655. static_assert( sizeof( SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
  32656. static_assert( std::is_standard_layout<SubpassDependency>::value, "struct wrapper is not a standard layout!" );
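// Usage sketch (illustrative, not emitted by the generator): an external-to-first-subpass dependency that
// orders colour-attachment writes in subpass 0 after earlier use of the attachment outside the render
// pass. The stage and access masks shown are one common choice, not the only valid one;
// VK_SUBPASS_EXTERNAL comes from vulkan_core.h.
inline SubpassDependency exampleExternalColorDependency() VULKAN_HPP_NOEXCEPT
{
  return SubpassDependency{}
    .setSrcSubpass( VK_SUBPASS_EXTERNAL )
    .setDstSubpass( 0 )
    .setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eColorAttachmentOutput )
    .setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits::eColorAttachmentOutput )
    .setSrcAccessMask( {} )
    .setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlagBits::eColorAttachmentWrite );
}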
  32657. struct RenderPassCreateInfo
  32658. {
  32659. static const bool allowDuplicate = false;
  32660. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo;
  32661. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  32662. VULKAN_HPP_CONSTEXPR RenderPassCreateInfo(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ = {}, uint32_t subpassCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ = {}, uint32_t dependencyCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ = {}) VULKAN_HPP_NOEXCEPT
  32663. : flags( flags_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), subpassCount( subpassCount_ ), pSubpasses( pSubpasses_ ), dependencyCount( dependencyCount_ ), pDependencies( pDependencies_ )
  32664. {}
  32665. VULKAN_HPP_CONSTEXPR RenderPassCreateInfo( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  32666. RenderPassCreateInfo( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  32667. : RenderPassCreateInfo( *reinterpret_cast<RenderPassCreateInfo const *>( &rhs ) )
  32668. {}
  32669. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32670. RenderPassCreateInfo( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const & attachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const & subpasses_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const & dependencies_ = {} )
  32671. : flags( flags_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), subpassCount( static_cast<uint32_t>( subpasses_.size() ) ), pSubpasses( subpasses_.data() ), dependencyCount( static_cast<uint32_t>( dependencies_.size() ) ), pDependencies( dependencies_.data() )
  32672. {}
  32673. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32674. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  32675. VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo & operator=( RenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  32676. RenderPassCreateInfo & operator=( VkRenderPassCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  32677. {
  32678. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo const *>( &rhs );
  32679. return *this;
  32680. }
  32681. RenderPassCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  32682. {
  32683. pNext = pNext_;
  32684. return *this;
  32685. }
  32686. RenderPassCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  32687. {
  32688. flags = flags_;
  32689. return *this;
  32690. }
  32691. RenderPassCreateInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
  32692. {
  32693. attachmentCount = attachmentCount_;
  32694. return *this;
  32695. }
  32696. RenderPassCreateInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments_ ) VULKAN_HPP_NOEXCEPT
  32697. {
  32698. pAttachments = pAttachments_;
  32699. return *this;
  32700. }
  32701. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32702. RenderPassCreateInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription> const & attachments_ ) VULKAN_HPP_NOEXCEPT
  32703. {
  32704. attachmentCount = static_cast<uint32_t>( attachments_.size() );
  32705. pAttachments = attachments_.data();
  32706. return *this;
  32707. }
  32708. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32709. RenderPassCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
  32710. {
  32711. subpassCount = subpassCount_;
  32712. return *this;
  32713. }
  32714. RenderPassCreateInfo & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses_ ) VULKAN_HPP_NOEXCEPT
  32715. {
  32716. pSubpasses = pSubpasses_;
  32717. return *this;
  32718. }
  32719. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32720. RenderPassCreateInfo & setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription> const & subpasses_ ) VULKAN_HPP_NOEXCEPT
  32721. {
  32722. subpassCount = static_cast<uint32_t>( subpasses_.size() );
  32723. pSubpasses = subpasses_.data();
  32724. return *this;
  32725. }
  32726. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32727. RenderPassCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
  32728. {
  32729. dependencyCount = dependencyCount_;
  32730. return *this;
  32731. }
  32732. RenderPassCreateInfo & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies_ ) VULKAN_HPP_NOEXCEPT
  32733. {
  32734. pDependencies = pDependencies_;
  32735. return *this;
  32736. }
  32737. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32738. RenderPassCreateInfo & setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency> const & dependencies_ ) VULKAN_HPP_NOEXCEPT
  32739. {
  32740. dependencyCount = static_cast<uint32_t>( dependencies_.size() );
  32741. pDependencies = dependencies_.data();
  32742. return *this;
  32743. }
  32744. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32745. operator VkRenderPassCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  32746. {
  32747. return *reinterpret_cast<const VkRenderPassCreateInfo*>( this );
  32748. }
  32749. operator VkRenderPassCreateInfo &() VULKAN_HPP_NOEXCEPT
  32750. {
  32751. return *reinterpret_cast<VkRenderPassCreateInfo*>( this );
  32752. }
  32753. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  32754. auto operator<=>( RenderPassCreateInfo const& ) const = default;
  32755. #else
  32756. bool operator==( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  32757. {
  32758. return ( sType == rhs.sType )
  32759. && ( pNext == rhs.pNext )
  32760. && ( flags == rhs.flags )
  32761. && ( attachmentCount == rhs.attachmentCount )
  32762. && ( pAttachments == rhs.pAttachments )
  32763. && ( subpassCount == rhs.subpassCount )
  32764. && ( pSubpasses == rhs.pSubpasses )
  32765. && ( dependencyCount == rhs.dependencyCount )
  32766. && ( pDependencies == rhs.pDependencies );
  32767. }
  32768. bool operator!=( RenderPassCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  32769. {
  32770. return !operator==( rhs );
  32771. }
  32772. #endif
  32773. public:
  32774. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo;
  32775. const void* pNext = {};
  32776. VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
  32777. uint32_t attachmentCount = {};
  32778. const VULKAN_HPP_NAMESPACE::AttachmentDescription* pAttachments = {};
  32779. uint32_t subpassCount = {};
  32780. const VULKAN_HPP_NAMESPACE::SubpassDescription* pSubpasses = {};
  32781. uint32_t dependencyCount = {};
  32782. const VULKAN_HPP_NAMESPACE::SubpassDependency* pDependencies = {};
  32783. };
  32784. static_assert( sizeof( RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
  32785. static_assert( std::is_standard_layout<RenderPassCreateInfo>::value, "struct wrapper is not a standard layout!" );
  32786. template <>
  32787. struct CppType<StructureType, StructureType::eRenderPassCreateInfo>
  32788. {
  32789. using Type = RenderPassCreateInfo;
  32790. };
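// Usage sketch (illustrative, not emitted by the generator): tying attachments, subpasses and
// dependencies together before a call such as Device::createRenderPass. The arrays are caller-owned
// assumptions; the create-info stores only pointers and counts, so they must stay alive until the
// render pass is created.
inline RenderPassCreateInfo exampleRenderPassInfo( const VULKAN_HPP_NAMESPACE::AttachmentDescription* attachments, uint32_t attachmentCount_, const VULKAN_HPP_NAMESPACE::SubpassDescription* subpasses, uint32_t subpassCount_, const VULKAN_HPP_NAMESPACE::SubpassDependency* dependencies, uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
{
  return RenderPassCreateInfo{}
    .setAttachmentCount( attachmentCount_ )
    .setPAttachments( attachments )
    .setSubpassCount( subpassCount_ )
    .setPSubpasses( subpasses )
    .setDependencyCount( dependencyCount_ )
    .setPDependencies( dependencies );
}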
  32791. struct SubpassDescription2
  32792. {
  32793. static const bool allowDuplicate = false;
  32794. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescription2;
  32795. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  32796. VULKAN_HPP_CONSTEXPR SubpassDescription2(VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ = {}, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, uint32_t viewMask_ = {}, uint32_t inputAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments_ = {}, uint32_t colorAttachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ = {}, uint32_t preserveAttachmentCount_ = {}, const uint32_t* pPreserveAttachments_ = {}) VULKAN_HPP_NOEXCEPT
  32797. : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), viewMask( viewMask_ ), inputAttachmentCount( inputAttachmentCount_ ), pInputAttachments( pInputAttachments_ ), colorAttachmentCount( colorAttachmentCount_ ), pColorAttachments( pColorAttachments_ ), pResolveAttachments( pResolveAttachments_ ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( preserveAttachmentCount_ ), pPreserveAttachments( pPreserveAttachments_ )
  32798. {}
  32799. VULKAN_HPP_CONSTEXPR SubpassDescription2( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  32800. SubpassDescription2( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
  32801. : SubpassDescription2( *reinterpret_cast<SubpassDescription2 const *>( &rhs ) )
  32802. {}
  32803. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32804. SubpassDescription2( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_, VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_, uint32_t viewMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & inputAttachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & colorAttachments_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & resolveAttachments_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ = {} )
  32805. : flags( flags_ ), pipelineBindPoint( pipelineBindPoint_ ), viewMask( viewMask_ ), inputAttachmentCount( static_cast<uint32_t>( inputAttachments_.size() ) ), pInputAttachments( inputAttachments_.data() ), colorAttachmentCount( static_cast<uint32_t>( colorAttachments_.size() ) ), pColorAttachments( colorAttachments_.data() ), pResolveAttachments( resolveAttachments_.data() ), pDepthStencilAttachment( pDepthStencilAttachment_ ), preserveAttachmentCount( static_cast<uint32_t>( preserveAttachments_.size() ) ), pPreserveAttachments( preserveAttachments_.data() )
  32806. {
  32807. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  32808. VULKAN_HPP_ASSERT( resolveAttachments_.empty() || ( colorAttachments_.size() == resolveAttachments_.size() ) );
  32809. #else
  32810. if ( !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() ) )
  32811. {
  32812. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubpassDescription2::SubpassDescription2: !resolveAttachments_.empty() && ( colorAttachments_.size() != resolveAttachments_.size() )" );
  32813. }
  32814. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  32815. }
  32816. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32817. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  32818. VULKAN_HPP_CONSTEXPR_14 SubpassDescription2 & operator=( SubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  32819. SubpassDescription2 & operator=( VkSubpassDescription2 const & rhs ) VULKAN_HPP_NOEXCEPT
  32820. {
  32821. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescription2 const *>( &rhs );
  32822. return *this;
  32823. }
  32824. SubpassDescription2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  32825. {
  32826. pNext = pNext_;
  32827. return *this;
  32828. }
  32829. SubpassDescription2 & setFlags( VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags_ ) VULKAN_HPP_NOEXCEPT
  32830. {
  32831. flags = flags_;
  32832. return *this;
  32833. }
  32834. SubpassDescription2 & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
  32835. {
  32836. pipelineBindPoint = pipelineBindPoint_;
  32837. return *this;
  32838. }
  32839. SubpassDescription2 & setViewMask( uint32_t viewMask_ ) VULKAN_HPP_NOEXCEPT
  32840. {
  32841. viewMask = viewMask_;
  32842. return *this;
  32843. }
  32844. SubpassDescription2 & setInputAttachmentCount( uint32_t inputAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
  32845. {
  32846. inputAttachmentCount = inputAttachmentCount_;
  32847. return *this;
  32848. }
  32849. SubpassDescription2 & setPInputAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments_ ) VULKAN_HPP_NOEXCEPT
  32850. {
  32851. pInputAttachments = pInputAttachments_;
  32852. return *this;
  32853. }
  32854. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32855. SubpassDescription2 & setInputAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & inputAttachments_ ) VULKAN_HPP_NOEXCEPT
  32856. {
  32857. inputAttachmentCount = static_cast<uint32_t>( inputAttachments_.size() );
  32858. pInputAttachments = inputAttachments_.data();
  32859. return *this;
  32860. }
  32861. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32862. SubpassDescription2 & setColorAttachmentCount( uint32_t colorAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
  32863. {
  32864. colorAttachmentCount = colorAttachmentCount_;
  32865. return *this;
  32866. }
  32867. SubpassDescription2 & setPColorAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments_ ) VULKAN_HPP_NOEXCEPT
  32868. {
  32869. pColorAttachments = pColorAttachments_;
  32870. return *this;
  32871. }
  32872. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32873. SubpassDescription2 & setColorAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & colorAttachments_ ) VULKAN_HPP_NOEXCEPT
  32874. {
  32875. colorAttachmentCount = static_cast<uint32_t>( colorAttachments_.size() );
  32876. pColorAttachments = colorAttachments_.data();
  32877. return *this;
  32878. }
  32879. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32880. SubpassDescription2 & setPResolveAttachments( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments_ ) VULKAN_HPP_NOEXCEPT
  32881. {
  32882. pResolveAttachments = pResolveAttachments_;
  32883. return *this;
  32884. }
  32885. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32886. SubpassDescription2 & setResolveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentReference2> const & resolveAttachments_ ) VULKAN_HPP_NOEXCEPT
  32887. {
  32888. colorAttachmentCount = static_cast<uint32_t>( resolveAttachments_.size() );
  32889. pResolveAttachments = resolveAttachments_.data();
  32890. return *this;
  32891. }
  32892. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32893. SubpassDescription2 & setPDepthStencilAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment_ ) VULKAN_HPP_NOEXCEPT
  32894. {
  32895. pDepthStencilAttachment = pDepthStencilAttachment_;
  32896. return *this;
  32897. }
  32898. SubpassDescription2 & setPreserveAttachmentCount( uint32_t preserveAttachmentCount_ ) VULKAN_HPP_NOEXCEPT
  32899. {
  32900. preserveAttachmentCount = preserveAttachmentCount_;
  32901. return *this;
  32902. }
  32903. SubpassDescription2 & setPPreserveAttachments( const uint32_t* pPreserveAttachments_ ) VULKAN_HPP_NOEXCEPT
  32904. {
  32905. pPreserveAttachments = pPreserveAttachments_;
  32906. return *this;
  32907. }
  32908. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32909. SubpassDescription2 & setPreserveAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & preserveAttachments_ ) VULKAN_HPP_NOEXCEPT
  32910. {
  32911. preserveAttachmentCount = static_cast<uint32_t>( preserveAttachments_.size() );
  32912. pPreserveAttachments = preserveAttachments_.data();
  32913. return *this;
  32914. }
  32915. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  32916. operator VkSubpassDescription2 const&() const VULKAN_HPP_NOEXCEPT
  32917. {
  32918. return *reinterpret_cast<const VkSubpassDescription2*>( this );
  32919. }
  32920. operator VkSubpassDescription2 &() VULKAN_HPP_NOEXCEPT
  32921. {
  32922. return *reinterpret_cast<VkSubpassDescription2*>( this );
  32923. }
  32924. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  32925. auto operator<=>( SubpassDescription2 const& ) const = default;
  32926. #else
  32927. bool operator==( SubpassDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  32928. {
  32929. return ( sType == rhs.sType )
  32930. && ( pNext == rhs.pNext )
  32931. && ( flags == rhs.flags )
  32932. && ( pipelineBindPoint == rhs.pipelineBindPoint )
  32933. && ( viewMask == rhs.viewMask )
  32934. && ( inputAttachmentCount == rhs.inputAttachmentCount )
  32935. && ( pInputAttachments == rhs.pInputAttachments )
  32936. && ( colorAttachmentCount == rhs.colorAttachmentCount )
  32937. && ( pColorAttachments == rhs.pColorAttachments )
  32938. && ( pResolveAttachments == rhs.pResolveAttachments )
  32939. && ( pDepthStencilAttachment == rhs.pDepthStencilAttachment )
  32940. && ( preserveAttachmentCount == rhs.preserveAttachmentCount )
  32941. && ( pPreserveAttachments == rhs.pPreserveAttachments );
  32942. }
  32943. bool operator!=( SubpassDescription2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  32944. {
  32945. return !operator==( rhs );
  32946. }
  32947. #endif
  32948. public:
  32949. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescription2;
  32950. const void* pNext = {};
  32951. VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags flags = {};
  32952. VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
  32953. uint32_t viewMask = {};
  32954. uint32_t inputAttachmentCount = {};
  32955. const VULKAN_HPP_NAMESPACE::AttachmentReference2* pInputAttachments = {};
  32956. uint32_t colorAttachmentCount = {};
  32957. const VULKAN_HPP_NAMESPACE::AttachmentReference2* pColorAttachments = {};
  32958. const VULKAN_HPP_NAMESPACE::AttachmentReference2* pResolveAttachments = {};
  32959. const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilAttachment = {};
  32960. uint32_t preserveAttachmentCount = {};
  32961. const uint32_t* pPreserveAttachments = {};
  32962. };
  32963. static_assert( sizeof( SubpassDescription2 ) == sizeof( VkSubpassDescription2 ), "struct and wrapper have different size!" );
  32964. static_assert( std::is_standard_layout<SubpassDescription2>::value, "struct wrapper is not a standard layout!" );
  32965. template <>
  32966. struct CppType<StructureType, StructureType::eSubpassDescription2>
  32967. {
  32968. using Type = SubpassDescription2;
  32969. };
  32970. using SubpassDescription2KHR = SubpassDescription2;
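// Illustrative usage sketch for SubpassDescription2 (not part of the generated header). It
// assumes `colorRefs` is a named std::vector<vk::AttachmentReference2> and `depthRef` a
// vk::AttachmentReference2, both outliving render-pass creation:
//
//   vk::SubpassDescription2 subpass = vk::SubpassDescription2{}
//       .setPipelineBindPoint( vk::PipelineBindPoint::eGraphics )
//       .setColorAttachments( colorRefs )           // also writes colorAttachmentCount
//       .setPDepthStencilAttachment( &depthRef );
//
// Note that setResolveAttachments() above updates colorAttachmentCount as well, because the
// spec defines pResolveAttachments as either null or an array of colorAttachmentCount entries.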
  32971. struct SubpassDependency2
  32972. {
  32973. static const bool allowDuplicate = false;
  32974. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDependency2;
  32975. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  32976. VULKAN_HPP_CONSTEXPR SubpassDependency2(uint32_t srcSubpass_ = {}, uint32_t dstSubpass_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ = {}, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ = {}, VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ = {}, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ = {}, int32_t viewOffset_ = {}) VULKAN_HPP_NOEXCEPT
  32977. : srcSubpass( srcSubpass_ ), dstSubpass( dstSubpass_ ), srcStageMask( srcStageMask_ ), dstStageMask( dstStageMask_ ), srcAccessMask( srcAccessMask_ ), dstAccessMask( dstAccessMask_ ), dependencyFlags( dependencyFlags_ ), viewOffset( viewOffset_ )
  32978. {}
  32979. VULKAN_HPP_CONSTEXPR SubpassDependency2( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  32980. SubpassDependency2( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
  32981. : SubpassDependency2( *reinterpret_cast<SubpassDependency2 const *>( &rhs ) )
  32982. {}
  32983. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  32984. VULKAN_HPP_CONSTEXPR_14 SubpassDependency2 & operator=( SubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  32985. SubpassDependency2 & operator=( VkSubpassDependency2 const & rhs ) VULKAN_HPP_NOEXCEPT
  32986. {
  32987. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDependency2 const *>( &rhs );
  32988. return *this;
  32989. }
  32990. SubpassDependency2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  32991. {
  32992. pNext = pNext_;
  32993. return *this;
  32994. }
  32995. SubpassDependency2 & setSrcSubpass( uint32_t srcSubpass_ ) VULKAN_HPP_NOEXCEPT
  32996. {
  32997. srcSubpass = srcSubpass_;
  32998. return *this;
  32999. }
  33000. SubpassDependency2 & setDstSubpass( uint32_t dstSubpass_ ) VULKAN_HPP_NOEXCEPT
  33001. {
  33002. dstSubpass = dstSubpass_;
  33003. return *this;
  33004. }
  33005. SubpassDependency2 & setSrcStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask_ ) VULKAN_HPP_NOEXCEPT
  33006. {
  33007. srcStageMask = srcStageMask_;
  33008. return *this;
  33009. }
  33010. SubpassDependency2 & setDstStageMask( VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask_ ) VULKAN_HPP_NOEXCEPT
  33011. {
  33012. dstStageMask = dstStageMask_;
  33013. return *this;
  33014. }
  33015. SubpassDependency2 & setSrcAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask_ ) VULKAN_HPP_NOEXCEPT
  33016. {
  33017. srcAccessMask = srcAccessMask_;
  33018. return *this;
  33019. }
  33020. SubpassDependency2 & setDstAccessMask( VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask_ ) VULKAN_HPP_NOEXCEPT
  33021. {
  33022. dstAccessMask = dstAccessMask_;
  33023. return *this;
  33024. }
  33025. SubpassDependency2 & setDependencyFlags( VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags_ ) VULKAN_HPP_NOEXCEPT
  33026. {
  33027. dependencyFlags = dependencyFlags_;
  33028. return *this;
  33029. }
  33030. SubpassDependency2 & setViewOffset( int32_t viewOffset_ ) VULKAN_HPP_NOEXCEPT
  33031. {
  33032. viewOffset = viewOffset_;
  33033. return *this;
  33034. }
  33035. operator VkSubpassDependency2 const&() const VULKAN_HPP_NOEXCEPT
  33036. {
  33037. return *reinterpret_cast<const VkSubpassDependency2*>( this );
  33038. }
  33039. operator VkSubpassDependency2 &() VULKAN_HPP_NOEXCEPT
  33040. {
  33041. return *reinterpret_cast<VkSubpassDependency2*>( this );
  33042. }
  33043. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  33044. auto operator<=>( SubpassDependency2 const& ) const = default;
  33045. #else
  33046. bool operator==( SubpassDependency2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  33047. {
  33048. return ( sType == rhs.sType )
  33049. && ( pNext == rhs.pNext )
  33050. && ( srcSubpass == rhs.srcSubpass )
  33051. && ( dstSubpass == rhs.dstSubpass )
  33052. && ( srcStageMask == rhs.srcStageMask )
  33053. && ( dstStageMask == rhs.dstStageMask )
  33054. && ( srcAccessMask == rhs.srcAccessMask )
  33055. && ( dstAccessMask == rhs.dstAccessMask )
  33056. && ( dependencyFlags == rhs.dependencyFlags )
  33057. && ( viewOffset == rhs.viewOffset );
  33058. }
  33059. bool operator!=( SubpassDependency2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  33060. {
  33061. return !operator==( rhs );
  33062. }
  33063. #endif
  33064. public:
  33065. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDependency2;
  33066. const void* pNext = {};
  33067. uint32_t srcSubpass = {};
  33068. uint32_t dstSubpass = {};
  33069. VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask = {};
  33070. VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask = {};
  33071. VULKAN_HPP_NAMESPACE::AccessFlags srcAccessMask = {};
  33072. VULKAN_HPP_NAMESPACE::AccessFlags dstAccessMask = {};
  33073. VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags = {};
  33074. int32_t viewOffset = {};
  33075. };
  33076. static_assert( sizeof( SubpassDependency2 ) == sizeof( VkSubpassDependency2 ), "struct and wrapper have different size!" );
  33077. static_assert( std::is_standard_layout<SubpassDependency2>::value, "struct wrapper is not a standard layout!" );
  33078. template <>
  33079. struct CppType<StructureType, StructureType::eSubpassDependency2>
  33080. {
  33081. using Type = SubpassDependency2;
  33082. };
  33083. using SubpassDependency2KHR = SubpassDependency2;
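// Illustrative usage sketch for SubpassDependency2 (not part of the generated header); the
// values shown are just a common external-to-subpass-0 color-attachment dependency:
//
//   vk::SubpassDependency2 dependency = vk::SubpassDependency2{}
//       .setSrcSubpass( VK_SUBPASS_EXTERNAL )
//       .setDstSubpass( 0 )
//       .setSrcStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
//       .setDstStageMask( vk::PipelineStageFlagBits::eColorAttachmentOutput )
//       .setDstAccessMask( vk::AccessFlagBits::eColorAttachmentWrite );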
  33084. struct RenderPassCreateInfo2
  33085. {
  33086. static const bool allowDuplicate = false;
  33087. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassCreateInfo2;
  33088. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  33089. VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2(VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ = {}, uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments_ = {}, uint32_t subpassCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses_ = {}, uint32_t dependencyCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies_ = {}, uint32_t correlatedViewMaskCount_ = {}, const uint32_t* pCorrelatedViewMasks_ = {}) VULKAN_HPP_NOEXCEPT
  33090. : flags( flags_ ), attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ ), subpassCount( subpassCount_ ), pSubpasses( pSubpasses_ ), dependencyCount( dependencyCount_ ), pDependencies( pDependencies_ ), correlatedViewMaskCount( correlatedViewMaskCount_ ), pCorrelatedViewMasks( pCorrelatedViewMasks_ )
  33091. {}
  33092. VULKAN_HPP_CONSTEXPR RenderPassCreateInfo2( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  33093. RenderPassCreateInfo2( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
  33094. : RenderPassCreateInfo2( *reinterpret_cast<RenderPassCreateInfo2 const *>( &rhs ) )
  33095. {}
  33096. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  33097. RenderPassCreateInfo2( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const & attachments_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const & subpasses_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const & dependencies_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ = {} )
  33098. : flags( flags_ ), attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() ), subpassCount( static_cast<uint32_t>( subpasses_.size() ) ), pSubpasses( subpasses_.data() ), dependencyCount( static_cast<uint32_t>( dependencies_.size() ) ), pDependencies( dependencies_.data() ), correlatedViewMaskCount( static_cast<uint32_t>( correlatedViewMasks_.size() ) ), pCorrelatedViewMasks( correlatedViewMasks_.data() )
  33099. {}
  33100. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  33101. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  33102. VULKAN_HPP_CONSTEXPR_14 RenderPassCreateInfo2 & operator=( RenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  33103. RenderPassCreateInfo2 & operator=( VkRenderPassCreateInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
  33104. {
  33105. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 const *>( &rhs );
  33106. return *this;
  33107. }
  33108. RenderPassCreateInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  33109. {
  33110. pNext = pNext_;
  33111. return *this;
  33112. }
  33113. RenderPassCreateInfo2 & setFlags( VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  33114. {
  33115. flags = flags_;
  33116. return *this;
  33117. }
  33118. RenderPassCreateInfo2 & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
  33119. {
  33120. attachmentCount = attachmentCount_;
  33121. return *this;
  33122. }
  33123. RenderPassCreateInfo2 & setPAttachments( const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments_ ) VULKAN_HPP_NOEXCEPT
  33124. {
  33125. pAttachments = pAttachments_;
  33126. return *this;
  33127. }
  33128. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  33129. RenderPassCreateInfo2 & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentDescription2> const & attachments_ ) VULKAN_HPP_NOEXCEPT
  33130. {
  33131. attachmentCount = static_cast<uint32_t>( attachments_.size() );
  33132. pAttachments = attachments_.data();
  33133. return *this;
  33134. }
  33135. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  33136. RenderPassCreateInfo2 & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
  33137. {
  33138. subpassCount = subpassCount_;
  33139. return *this;
  33140. }
  33141. RenderPassCreateInfo2 & setPSubpasses( const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses_ ) VULKAN_HPP_NOEXCEPT
  33142. {
  33143. pSubpasses = pSubpasses_;
  33144. return *this;
  33145. }
  33146. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  33147. RenderPassCreateInfo2 & setSubpasses( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDescription2> const & subpasses_ ) VULKAN_HPP_NOEXCEPT
  33148. {
  33149. subpassCount = static_cast<uint32_t>( subpasses_.size() );
  33150. pSubpasses = subpasses_.data();
  33151. return *this;
  33152. }
  33153. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  33154. RenderPassCreateInfo2 & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
  33155. {
  33156. dependencyCount = dependencyCount_;
  33157. return *this;
  33158. }
  33159. RenderPassCreateInfo2 & setPDependencies( const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies_ ) VULKAN_HPP_NOEXCEPT
  33160. {
  33161. pDependencies = pDependencies_;
  33162. return *this;
  33163. }
  33164. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  33165. RenderPassCreateInfo2 & setDependencies( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassDependency2> const & dependencies_ ) VULKAN_HPP_NOEXCEPT
  33166. {
  33167. dependencyCount = static_cast<uint32_t>( dependencies_.size() );
  33168. pDependencies = dependencies_.data();
  33169. return *this;
  33170. }
  33171. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  33172. RenderPassCreateInfo2 & setCorrelatedViewMaskCount( uint32_t correlatedViewMaskCount_ ) VULKAN_HPP_NOEXCEPT
  33173. {
  33174. correlatedViewMaskCount = correlatedViewMaskCount_;
  33175. return *this;
  33176. }
  33177. RenderPassCreateInfo2 & setPCorrelatedViewMasks( const uint32_t* pCorrelatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
  33178. {
  33179. pCorrelatedViewMasks = pCorrelatedViewMasks_;
  33180. return *this;
  33181. }
  33182. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  33183. RenderPassCreateInfo2 & setCorrelatedViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlatedViewMasks_ ) VULKAN_HPP_NOEXCEPT
  33184. {
  33185. correlatedViewMaskCount = static_cast<uint32_t>( correlatedViewMasks_.size() );
  33186. pCorrelatedViewMasks = correlatedViewMasks_.data();
  33187. return *this;
  33188. }
  33189. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  33190. operator VkRenderPassCreateInfo2 const&() const VULKAN_HPP_NOEXCEPT
  33191. {
  33192. return *reinterpret_cast<const VkRenderPassCreateInfo2*>( this );
  33193. }
  33194. operator VkRenderPassCreateInfo2 &() VULKAN_HPP_NOEXCEPT
  33195. {
  33196. return *reinterpret_cast<VkRenderPassCreateInfo2*>( this );
  33197. }
  33198. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  33199. auto operator<=>( RenderPassCreateInfo2 const& ) const = default;
  33200. #else
  33201. bool operator==( RenderPassCreateInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  33202. {
  33203. return ( sType == rhs.sType )
  33204. && ( pNext == rhs.pNext )
  33205. && ( flags == rhs.flags )
  33206. && ( attachmentCount == rhs.attachmentCount )
  33207. && ( pAttachments == rhs.pAttachments )
  33208. && ( subpassCount == rhs.subpassCount )
  33209. && ( pSubpasses == rhs.pSubpasses )
  33210. && ( dependencyCount == rhs.dependencyCount )
  33211. && ( pDependencies == rhs.pDependencies )
  33212. && ( correlatedViewMaskCount == rhs.correlatedViewMaskCount )
  33213. && ( pCorrelatedViewMasks == rhs.pCorrelatedViewMasks );
  33214. }
  33215. bool operator!=( RenderPassCreateInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  33216. {
  33217. return !operator==( rhs );
  33218. }
  33219. #endif
  33220. public:
  33221. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassCreateInfo2;
  33222. const void* pNext = {};
  33223. VULKAN_HPP_NAMESPACE::RenderPassCreateFlags flags = {};
  33224. uint32_t attachmentCount = {};
  33225. const VULKAN_HPP_NAMESPACE::AttachmentDescription2* pAttachments = {};
  33226. uint32_t subpassCount = {};
  33227. const VULKAN_HPP_NAMESPACE::SubpassDescription2* pSubpasses = {};
  33228. uint32_t dependencyCount = {};
  33229. const VULKAN_HPP_NAMESPACE::SubpassDependency2* pDependencies = {};
  33230. uint32_t correlatedViewMaskCount = {};
  33231. const uint32_t* pCorrelatedViewMasks = {};
  33232. };
  33233. static_assert( sizeof( RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ), "struct and wrapper have different size!" );
  33234. static_assert( std::is_standard_layout<RenderPassCreateInfo2>::value, "struct wrapper is not a standard layout!" );
  33235. template <>
  33236. struct CppType<StructureType, StructureType::eRenderPassCreateInfo2>
  33237. {
  33238. using Type = RenderPassCreateInfo2;
  33239. };
  33240. using RenderPassCreateInfo2KHR = RenderPassCreateInfo2;
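// Illustrative usage sketch (not part of the generated header). Assumes `device` is a valid
// vk::Device, `attachments`, `subpasses` and `dependencies` are named std::vectors of the
// corresponding "2" structs, and the default exception-enabled configuration is in use:
//
//   vk::RenderPassCreateInfo2 renderPassInfo( {}, attachments, subpasses, dependencies );
//   vk::RenderPass renderPass = device.createRenderPass2( renderPassInfo );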
  33241. struct SamplerCreateInfo
  33242. {
  33243. static const bool allowDuplicate = false;
  33244. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCreateInfo;
  33245. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  33246. VULKAN_HPP_CONSTEXPR SamplerCreateInfo(VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::Filter magFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::Filter minFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat, float mipLodBias_ = {}, VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ = {}, float maxAnisotropy_ = {}, VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ = {}, VULKAN_HPP_NAMESPACE::CompareOp compareOp_ = VULKAN_HPP_NAMESPACE::CompareOp::eNever, float minLod_ = {}, float maxLod_ = {}, VULKAN_HPP_NAMESPACE::BorderColor borderColor_ = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack, VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ = {}) VULKAN_HPP_NOEXCEPT
  33247. : flags( flags_ ), magFilter( magFilter_ ), minFilter( minFilter_ ), mipmapMode( mipmapMode_ ), addressModeU( addressModeU_ ), addressModeV( addressModeV_ ), addressModeW( addressModeW_ ), mipLodBias( mipLodBias_ ), anisotropyEnable( anisotropyEnable_ ), maxAnisotropy( maxAnisotropy_ ), compareEnable( compareEnable_ ), compareOp( compareOp_ ), minLod( minLod_ ), maxLod( maxLod_ ), borderColor( borderColor_ ), unnormalizedCoordinates( unnormalizedCoordinates_ )
  33248. {}
  33249. VULKAN_HPP_CONSTEXPR SamplerCreateInfo( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  33250. SamplerCreateInfo( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  33251. : SamplerCreateInfo( *reinterpret_cast<SamplerCreateInfo const *>( &rhs ) )
  33252. {}
  33253. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  33254. VULKAN_HPP_CONSTEXPR_14 SamplerCreateInfo & operator=( SamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  33255. SamplerCreateInfo & operator=( VkSamplerCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  33256. {
  33257. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCreateInfo const *>( &rhs );
  33258. return *this;
  33259. }
  33260. SamplerCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  33261. {
  33262. pNext = pNext_;
  33263. return *this;
  33264. }
  33265. SamplerCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  33266. {
  33267. flags = flags_;
  33268. return *this;
  33269. }
  33270. SamplerCreateInfo & setMagFilter( VULKAN_HPP_NAMESPACE::Filter magFilter_ ) VULKAN_HPP_NOEXCEPT
  33271. {
  33272. magFilter = magFilter_;
  33273. return *this;
  33274. }
  33275. SamplerCreateInfo & setMinFilter( VULKAN_HPP_NAMESPACE::Filter minFilter_ ) VULKAN_HPP_NOEXCEPT
  33276. {
  33277. minFilter = minFilter_;
  33278. return *this;
  33279. }
  33280. SamplerCreateInfo & setMipmapMode( VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode_ ) VULKAN_HPP_NOEXCEPT
  33281. {
  33282. mipmapMode = mipmapMode_;
  33283. return *this;
  33284. }
  33285. SamplerCreateInfo & setAddressModeU( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU_ ) VULKAN_HPP_NOEXCEPT
  33286. {
  33287. addressModeU = addressModeU_;
  33288. return *this;
  33289. }
  33290. SamplerCreateInfo & setAddressModeV( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV_ ) VULKAN_HPP_NOEXCEPT
  33291. {
  33292. addressModeV = addressModeV_;
  33293. return *this;
  33294. }
  33295. SamplerCreateInfo & setAddressModeW( VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW_ ) VULKAN_HPP_NOEXCEPT
  33296. {
  33297. addressModeW = addressModeW_;
  33298. return *this;
  33299. }
  33300. SamplerCreateInfo & setMipLodBias( float mipLodBias_ ) VULKAN_HPP_NOEXCEPT
  33301. {
  33302. mipLodBias = mipLodBias_;
  33303. return *this;
  33304. }
  33305. SamplerCreateInfo & setAnisotropyEnable( VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable_ ) VULKAN_HPP_NOEXCEPT
  33306. {
  33307. anisotropyEnable = anisotropyEnable_;
  33308. return *this;
  33309. }
  33310. SamplerCreateInfo & setMaxAnisotropy( float maxAnisotropy_ ) VULKAN_HPP_NOEXCEPT
  33311. {
  33312. maxAnisotropy = maxAnisotropy_;
  33313. return *this;
  33314. }
  33315. SamplerCreateInfo & setCompareEnable( VULKAN_HPP_NAMESPACE::Bool32 compareEnable_ ) VULKAN_HPP_NOEXCEPT
  33316. {
  33317. compareEnable = compareEnable_;
  33318. return *this;
  33319. }
  33320. SamplerCreateInfo & setCompareOp( VULKAN_HPP_NAMESPACE::CompareOp compareOp_ ) VULKAN_HPP_NOEXCEPT
  33321. {
  33322. compareOp = compareOp_;
  33323. return *this;
  33324. }
  33325. SamplerCreateInfo & setMinLod( float minLod_ ) VULKAN_HPP_NOEXCEPT
  33326. {
  33327. minLod = minLod_;
  33328. return *this;
  33329. }
  33330. SamplerCreateInfo & setMaxLod( float maxLod_ ) VULKAN_HPP_NOEXCEPT
  33331. {
  33332. maxLod = maxLod_;
  33333. return *this;
  33334. }
  33335. SamplerCreateInfo & setBorderColor( VULKAN_HPP_NAMESPACE::BorderColor borderColor_ ) VULKAN_HPP_NOEXCEPT
  33336. {
  33337. borderColor = borderColor_;
  33338. return *this;
  33339. }
  33340. SamplerCreateInfo & setUnnormalizedCoordinates( VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates_ ) VULKAN_HPP_NOEXCEPT
  33341. {
  33342. unnormalizedCoordinates = unnormalizedCoordinates_;
  33343. return *this;
  33344. }
  33345. operator VkSamplerCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  33346. {
  33347. return *reinterpret_cast<const VkSamplerCreateInfo*>( this );
  33348. }
  33349. operator VkSamplerCreateInfo &() VULKAN_HPP_NOEXCEPT
  33350. {
  33351. return *reinterpret_cast<VkSamplerCreateInfo*>( this );
  33352. }
  33353. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  33354. auto operator<=>( SamplerCreateInfo const& ) const = default;
  33355. #else
  33356. bool operator==( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  33357. {
  33358. return ( sType == rhs.sType )
  33359. && ( pNext == rhs.pNext )
  33360. && ( flags == rhs.flags )
  33361. && ( magFilter == rhs.magFilter )
  33362. && ( minFilter == rhs.minFilter )
  33363. && ( mipmapMode == rhs.mipmapMode )
  33364. && ( addressModeU == rhs.addressModeU )
  33365. && ( addressModeV == rhs.addressModeV )
  33366. && ( addressModeW == rhs.addressModeW )
  33367. && ( mipLodBias == rhs.mipLodBias )
  33368. && ( anisotropyEnable == rhs.anisotropyEnable )
  33369. && ( maxAnisotropy == rhs.maxAnisotropy )
  33370. && ( compareEnable == rhs.compareEnable )
  33371. && ( compareOp == rhs.compareOp )
  33372. && ( minLod == rhs.minLod )
  33373. && ( maxLod == rhs.maxLod )
  33374. && ( borderColor == rhs.borderColor )
  33375. && ( unnormalizedCoordinates == rhs.unnormalizedCoordinates );
  33376. }
  33377. bool operator!=( SamplerCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  33378. {
  33379. return !operator==( rhs );
  33380. }
  33381. #endif
  33382. public:
  33383. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCreateInfo;
  33384. const void* pNext = {};
  33385. VULKAN_HPP_NAMESPACE::SamplerCreateFlags flags = {};
  33386. VULKAN_HPP_NAMESPACE::Filter magFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
  33387. VULKAN_HPP_NAMESPACE::Filter minFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
  33388. VULKAN_HPP_NAMESPACE::SamplerMipmapMode mipmapMode = VULKAN_HPP_NAMESPACE::SamplerMipmapMode::eNearest;
  33389. VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeU = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
  33390. VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeV = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
  33391. VULKAN_HPP_NAMESPACE::SamplerAddressMode addressModeW = VULKAN_HPP_NAMESPACE::SamplerAddressMode::eRepeat;
  33392. float mipLodBias = {};
  33393. VULKAN_HPP_NAMESPACE::Bool32 anisotropyEnable = {};
  33394. float maxAnisotropy = {};
  33395. VULKAN_HPP_NAMESPACE::Bool32 compareEnable = {};
  33396. VULKAN_HPP_NAMESPACE::CompareOp compareOp = VULKAN_HPP_NAMESPACE::CompareOp::eNever;
  33397. float minLod = {};
  33398. float maxLod = {};
  33399. VULKAN_HPP_NAMESPACE::BorderColor borderColor = VULKAN_HPP_NAMESPACE::BorderColor::eFloatTransparentBlack;
  33400. VULKAN_HPP_NAMESPACE::Bool32 unnormalizedCoordinates = {};
  33401. };
  33402. static_assert( sizeof( SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" );
  33403. static_assert( std::is_standard_layout<SamplerCreateInfo>::value, "struct wrapper is not a standard layout!" );
  33404. template <>
  33405. struct CppType<StructureType, StructureType::eSamplerCreateInfo>
  33406. {
  33407. using Type = SamplerCreateInfo;
  33408. };
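// Illustrative usage sketch for SamplerCreateInfo (not part of the generated header); assumes
// a valid vk::Device `device` and the default exception-enabled configuration:
//
//   vk::SamplerCreateInfo samplerInfo = vk::SamplerCreateInfo{}
//       .setMagFilter( vk::Filter::eLinear )
//       .setMinFilter( vk::Filter::eLinear )
//       .setMipmapMode( vk::SamplerMipmapMode::eLinear )
//       .setAddressModeU( vk::SamplerAddressMode::eRepeat )
//       .setMaxLod( VK_LOD_CLAMP_NONE );
//   vk::Sampler sampler = device.createSampler( samplerInfo );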
  33409. struct SamplerYcbcrConversionCreateInfo
  33410. {
  33411. static const bool allowDuplicate = false;
  33412. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionCreateInfo;
  33413. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  33414. VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity, VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull, VULKAN_HPP_NAMESPACE::ComponentMapping components_ = {}, VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven, VULKAN_HPP_NAMESPACE::Filter chromaFilter_ = VULKAN_HPP_NAMESPACE::Filter::eNearest, VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ = {}) VULKAN_HPP_NOEXCEPT
  33415. : format( format_ ), ycbcrModel( ycbcrModel_ ), ycbcrRange( ycbcrRange_ ), components( components_ ), xChromaOffset( xChromaOffset_ ), yChromaOffset( yChromaOffset_ ), chromaFilter( chromaFilter_ ), forceExplicitReconstruction( forceExplicitReconstruction_ )
  33416. {}
  33417. VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionCreateInfo( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  33418. SamplerYcbcrConversionCreateInfo( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  33419. : SamplerYcbcrConversionCreateInfo( *reinterpret_cast<SamplerYcbcrConversionCreateInfo const *>( &rhs ) )
  33420. {}
  33421. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  33422. VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionCreateInfo & operator=( SamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  33423. SamplerYcbcrConversionCreateInfo & operator=( VkSamplerYcbcrConversionCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  33424. {
  33425. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo const *>( &rhs );
  33426. return *this;
  33427. }
  33428. SamplerYcbcrConversionCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  33429. {
  33430. pNext = pNext_;
  33431. return *this;
  33432. }
  33433. SamplerYcbcrConversionCreateInfo & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
  33434. {
  33435. format = format_;
  33436. return *this;
  33437. }
  33438. SamplerYcbcrConversionCreateInfo & setYcbcrModel( VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel_ ) VULKAN_HPP_NOEXCEPT
  33439. {
  33440. ycbcrModel = ycbcrModel_;
  33441. return *this;
  33442. }
  33443. SamplerYcbcrConversionCreateInfo & setYcbcrRange( VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange_ ) VULKAN_HPP_NOEXCEPT
  33444. {
  33445. ycbcrRange = ycbcrRange_;
  33446. return *this;
  33447. }
  33448. SamplerYcbcrConversionCreateInfo & setComponents( VULKAN_HPP_NAMESPACE::ComponentMapping const & components_ ) VULKAN_HPP_NOEXCEPT
  33449. {
  33450. components = components_;
  33451. return *this;
  33452. }
  33453. SamplerYcbcrConversionCreateInfo & setXChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset_ ) VULKAN_HPP_NOEXCEPT
  33454. {
  33455. xChromaOffset = xChromaOffset_;
  33456. return *this;
  33457. }
  33458. SamplerYcbcrConversionCreateInfo & setYChromaOffset( VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset_ ) VULKAN_HPP_NOEXCEPT
  33459. {
  33460. yChromaOffset = yChromaOffset_;
  33461. return *this;
  33462. }
  33463. SamplerYcbcrConversionCreateInfo & setChromaFilter( VULKAN_HPP_NAMESPACE::Filter chromaFilter_ ) VULKAN_HPP_NOEXCEPT
  33464. {
  33465. chromaFilter = chromaFilter_;
  33466. return *this;
  33467. }
  33468. SamplerYcbcrConversionCreateInfo & setForceExplicitReconstruction( VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction_ ) VULKAN_HPP_NOEXCEPT
  33469. {
  33470. forceExplicitReconstruction = forceExplicitReconstruction_;
  33471. return *this;
  33472. }
  33473. operator VkSamplerYcbcrConversionCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  33474. {
  33475. return *reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo*>( this );
  33476. }
  33477. operator VkSamplerYcbcrConversionCreateInfo &() VULKAN_HPP_NOEXCEPT
  33478. {
  33479. return *reinterpret_cast<VkSamplerYcbcrConversionCreateInfo*>( this );
  33480. }
  33481. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  33482. auto operator<=>( SamplerYcbcrConversionCreateInfo const& ) const = default;
  33483. #else
  33484. bool operator==( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  33485. {
  33486. return ( sType == rhs.sType )
  33487. && ( pNext == rhs.pNext )
  33488. && ( format == rhs.format )
  33489. && ( ycbcrModel == rhs.ycbcrModel )
  33490. && ( ycbcrRange == rhs.ycbcrRange )
  33491. && ( components == rhs.components )
  33492. && ( xChromaOffset == rhs.xChromaOffset )
  33493. && ( yChromaOffset == rhs.yChromaOffset )
  33494. && ( chromaFilter == rhs.chromaFilter )
  33495. && ( forceExplicitReconstruction == rhs.forceExplicitReconstruction );
  33496. }
  33497. bool operator!=( SamplerYcbcrConversionCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  33498. {
  33499. return !operator==( rhs );
  33500. }
  33501. #endif
  33502. public:
  33503. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionCreateInfo;
  33504. const void* pNext = {};
  33505. VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  33506. VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion ycbcrModel = VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion::eRgbIdentity;
  33507. VULKAN_HPP_NAMESPACE::SamplerYcbcrRange ycbcrRange = VULKAN_HPP_NAMESPACE::SamplerYcbcrRange::eItuFull;
  33508. VULKAN_HPP_NAMESPACE::ComponentMapping components = {};
  33509. VULKAN_HPP_NAMESPACE::ChromaLocation xChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
  33510. VULKAN_HPP_NAMESPACE::ChromaLocation yChromaOffset = VULKAN_HPP_NAMESPACE::ChromaLocation::eCositedEven;
  33511. VULKAN_HPP_NAMESPACE::Filter chromaFilter = VULKAN_HPP_NAMESPACE::Filter::eNearest;
  33512. VULKAN_HPP_NAMESPACE::Bool32 forceExplicitReconstruction = {};
  33513. };
  33514. static_assert( sizeof( SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ), "struct and wrapper have different size!" );
  33515. static_assert( std::is_standard_layout<SamplerYcbcrConversionCreateInfo>::value, "struct wrapper is not a standard layout!" );
  33516. template <>
  33517. struct CppType<StructureType, StructureType::eSamplerYcbcrConversionCreateInfo>
  33518. {
  33519. using Type = SamplerYcbcrConversionCreateInfo;
  33520. };
  33521. using SamplerYcbcrConversionCreateInfoKHR = SamplerYcbcrConversionCreateInfo;
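// Illustrative usage sketch (not part of the generated header). Assumes a valid vk::Device
// `device` and a format that supports sampler Y'CbCr conversion on the target device:
//
//   vk::SamplerYcbcrConversionCreateInfo conversionInfo = vk::SamplerYcbcrConversionCreateInfo{}
//       .setFormat( vk::Format::eG8B8R83Plane420Unorm )
//       .setYcbcrModel( vk::SamplerYcbcrModelConversion::eYcbcr709 )
//       .setYcbcrRange( vk::SamplerYcbcrRange::eItuNarrow )
//       .setChromaFilter( vk::Filter::eLinear );
//   vk::SamplerYcbcrConversion conversion = device.createSamplerYcbcrConversion( conversionInfo );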
  33522. class SamplerYcbcrConversion
  33523. {
  33524. public:
  33525. using CType = VkSamplerYcbcrConversion;
  33526. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion;
  33527. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion;
  33528. public:
  33529. VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion() VULKAN_HPP_NOEXCEPT
  33530. : m_samplerYcbcrConversion(VK_NULL_HANDLE)
  33531. {}
  33532. VULKAN_HPP_CONSTEXPR SamplerYcbcrConversion( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  33533. : m_samplerYcbcrConversion(VK_NULL_HANDLE)
  33534. {}
  33535. VULKAN_HPP_TYPESAFE_EXPLICIT SamplerYcbcrConversion( VkSamplerYcbcrConversion samplerYcbcrConversion ) VULKAN_HPP_NOEXCEPT
  33536. : m_samplerYcbcrConversion( samplerYcbcrConversion )
  33537. {}
  33538. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  33539. SamplerYcbcrConversion & operator=(VkSamplerYcbcrConversion samplerYcbcrConversion) VULKAN_HPP_NOEXCEPT
  33540. {
  33541. m_samplerYcbcrConversion = samplerYcbcrConversion;
  33542. return *this;
  33543. }
  33544. #endif
  33545. SamplerYcbcrConversion & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  33546. {
  33547. m_samplerYcbcrConversion = VK_NULL_HANDLE;
  33548. return *this;
  33549. }
  33550. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  33551. auto operator<=>( SamplerYcbcrConversion const& ) const = default;
  33552. #else
  33553. bool operator==( SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
  33554. {
  33555. return m_samplerYcbcrConversion == rhs.m_samplerYcbcrConversion;
  33556. }
  33557. bool operator!=(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
  33558. {
  33559. return m_samplerYcbcrConversion != rhs.m_samplerYcbcrConversion;
  33560. }
  33561. bool operator<(SamplerYcbcrConversion const & rhs ) const VULKAN_HPP_NOEXCEPT
  33562. {
  33563. return m_samplerYcbcrConversion < rhs.m_samplerYcbcrConversion;
  33564. }
  33565. #endif
  33566. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSamplerYcbcrConversion() const VULKAN_HPP_NOEXCEPT
  33567. {
  33568. return m_samplerYcbcrConversion;
  33569. }
  33570. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  33571. {
  33572. return m_samplerYcbcrConversion != VK_NULL_HANDLE;
  33573. }
  33574. bool operator!() const VULKAN_HPP_NOEXCEPT
  33575. {
  33576. return m_samplerYcbcrConversion == VK_NULL_HANDLE;
  33577. }
  33578. private:
  33579. VkSamplerYcbcrConversion m_samplerYcbcrConversion;
  33580. };
  33581. static_assert( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ), "handle and wrapper have different size!" );
  33582. template <>
  33583. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eSamplerYcbcrConversion>
  33584. {
  33585. using type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
  33586. };
  33587. template <>
  33588. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSamplerYcbcrConversion>
  33589. {
  33590. using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
  33591. };
  33592. template <>
  33593. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSamplerYcbcrConversion>
  33594. {
  33595. using Type = VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
  33596. };
  33597. template <>
  33598. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>
  33599. {
  33600. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  33601. };
  33602. using SamplerYcbcrConversionKHR = SamplerYcbcrConversion;
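// Note: to actually sample with a SamplerYcbcrConversion handle, reference it from a
// vk::SamplerYcbcrConversionInfo chained into SamplerCreateInfo::pNext (and into the
// corresponding ImageViewCreateInfo::pNext).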
  33603. struct SemaphoreCreateInfo
  33604. {
  33605. static const bool allowDuplicate = false;
  33606. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreCreateInfo;
  33607. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  33608. VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo(VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
  33609. : flags( flags_ )
  33610. {}
  33611. VULKAN_HPP_CONSTEXPR SemaphoreCreateInfo( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  33612. SemaphoreCreateInfo( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  33613. : SemaphoreCreateInfo( *reinterpret_cast<SemaphoreCreateInfo const *>( &rhs ) )
  33614. {}
  33615. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  33616. VULKAN_HPP_CONSTEXPR_14 SemaphoreCreateInfo & operator=( SemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  33617. SemaphoreCreateInfo & operator=( VkSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  33618. {
  33619. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo const *>( &rhs );
  33620. return *this;
  33621. }
  33622. SemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  33623. {
  33624. pNext = pNext_;
  33625. return *this;
  33626. }
  33627. SemaphoreCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  33628. {
  33629. flags = flags_;
  33630. return *this;
  33631. }
  33632. operator VkSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  33633. {
  33634. return *reinterpret_cast<const VkSemaphoreCreateInfo*>( this );
  33635. }
  33636. operator VkSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
  33637. {
  33638. return *reinterpret_cast<VkSemaphoreCreateInfo*>( this );
  33639. }
  33640. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  33641. auto operator<=>( SemaphoreCreateInfo const& ) const = default;
  33642. #else
  33643. bool operator==( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  33644. {
  33645. return ( sType == rhs.sType )
  33646. && ( pNext == rhs.pNext )
  33647. && ( flags == rhs.flags );
  33648. }
  33649. bool operator!=( SemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  33650. {
  33651. return !operator==( rhs );
  33652. }
  33653. #endif
  33654. public:
  33655. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreCreateInfo;
  33656. const void* pNext = {};
  33657. VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags flags = {};
  33658. };
  33659. static_assert( sizeof( SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" );
  33660. static_assert( std::is_standard_layout<SemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
  33661. template <>
  33662. struct CppType<StructureType, StructureType::eSemaphoreCreateInfo>
  33663. {
  33664. using Type = SemaphoreCreateInfo;
  33665. };
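// Illustrative usage sketch (not part of the generated header); a binary semaphore needs no
// flags, so a default-constructed create info suffices:
//
//   vk::Semaphore imageAvailable = device.createSemaphore( vk::SemaphoreCreateInfo{} );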
  33666. struct ShaderModuleCreateInfo
  33667. {
  33668. static const bool allowDuplicate = false;
  33669. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleCreateInfo;
  33670. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  33671. VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo(VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ = {}, size_t codeSize_ = {}, const uint32_t* pCode_ = {}) VULKAN_HPP_NOEXCEPT
  33672. : flags( flags_ ), codeSize( codeSize_ ), pCode( pCode_ )
  33673. {}
  33674. VULKAN_HPP_CONSTEXPR ShaderModuleCreateInfo( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  33675. ShaderModuleCreateInfo( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  33676. : ShaderModuleCreateInfo( *reinterpret_cast<ShaderModuleCreateInfo const *>( &rhs ) )
  33677. {}
  33678. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  33679. ShaderModuleCreateInfo( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_ )
  33680. : flags( flags_ ), codeSize( code_.size() * 4 ), pCode( code_.data() )
  33681. {}
  33682. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  33683. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  33684. VULKAN_HPP_CONSTEXPR_14 ShaderModuleCreateInfo & operator=( ShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  33685. ShaderModuleCreateInfo & operator=( VkShaderModuleCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  33686. {
  33687. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo const *>( &rhs );
  33688. return *this;
  33689. }
  33690. ShaderModuleCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  33691. {
  33692. pNext = pNext_;
  33693. return *this;
  33694. }
  33695. ShaderModuleCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  33696. {
  33697. flags = flags_;
  33698. return *this;
  33699. }
  33700. ShaderModuleCreateInfo & setCodeSize( size_t codeSize_ ) VULKAN_HPP_NOEXCEPT
  33701. {
  33702. codeSize = codeSize_;
  33703. return *this;
  33704. }
  33705. ShaderModuleCreateInfo & setPCode( const uint32_t* pCode_ ) VULKAN_HPP_NOEXCEPT
  33706. {
  33707. pCode = pCode_;
  33708. return *this;
  33709. }
  33710. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  33711. ShaderModuleCreateInfo & setCode( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & code_ ) VULKAN_HPP_NOEXCEPT
  33712. {
  33713. codeSize = code_.size() * 4;
  33714. pCode = code_.data();
  33715. return *this;
  33716. }
  33717. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  33718. operator VkShaderModuleCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  33719. {
  33720. return *reinterpret_cast<const VkShaderModuleCreateInfo*>( this );
  33721. }
  33722. operator VkShaderModuleCreateInfo &() VULKAN_HPP_NOEXCEPT
  33723. {
  33724. return *reinterpret_cast<VkShaderModuleCreateInfo*>( this );
  33725. }
  33726. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  33727. auto operator<=>( ShaderModuleCreateInfo const& ) const = default;
  33728. #else
  33729. bool operator==( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  33730. {
  33731. return ( sType == rhs.sType )
  33732. && ( pNext == rhs.pNext )
  33733. && ( flags == rhs.flags )
  33734. && ( codeSize == rhs.codeSize )
  33735. && ( pCode == rhs.pCode );
  33736. }
  33737. bool operator!=( ShaderModuleCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  33738. {
  33739. return !operator==( rhs );
  33740. }
  33741. #endif
  33742. public:
  33743. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleCreateInfo;
  33744. const void* pNext = {};
  33745. VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags flags = {};
  33746. size_t codeSize = {};
  33747. const uint32_t* pCode = {};
  33748. };
  33749. static_assert( sizeof( ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ), "struct and wrapper have different size!" );
  33750. static_assert( std::is_standard_layout<ShaderModuleCreateInfo>::value, "struct wrapper is not a standard layout!" );
  33751. template <>
  33752. struct CppType<StructureType, StructureType::eShaderModuleCreateInfo>
  33753. {
  33754. using Type = ShaderModuleCreateInfo;
  33755. };
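// Illustrative usage sketch (not part of the generated header). `loadSpirv` is a hypothetical
// helper returning std::vector<uint32_t>; note that codeSize is a byte count, which the
// ArrayProxy constructor and setCode() above derive as code_.size() * 4:
//
//   std::vector<uint32_t> spirv = loadSpirv( "shader.vert.spv" );
//   vk::ShaderModule shaderModule =
//       device.createShaderModule( vk::ShaderModuleCreateInfo{}.setCode( spirv ) );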
  33756. class SurfaceKHR
  33757. {
  33758. public:
  33759. using CType = VkSurfaceKHR;
  33760. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR;
  33761. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR;
  33762. public:
  33763. VULKAN_HPP_CONSTEXPR SurfaceKHR() VULKAN_HPP_NOEXCEPT
  33764. : m_surfaceKHR(VK_NULL_HANDLE)
  33765. {}
  33766. VULKAN_HPP_CONSTEXPR SurfaceKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  33767. : m_surfaceKHR(VK_NULL_HANDLE)
  33768. {}
  33769. VULKAN_HPP_TYPESAFE_EXPLICIT SurfaceKHR( VkSurfaceKHR surfaceKHR ) VULKAN_HPP_NOEXCEPT
  33770. : m_surfaceKHR( surfaceKHR )
  33771. {}
  33772. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  33773. SurfaceKHR & operator=(VkSurfaceKHR surfaceKHR) VULKAN_HPP_NOEXCEPT
  33774. {
  33775. m_surfaceKHR = surfaceKHR;
  33776. return *this;
  33777. }
  33778. #endif
  33779. SurfaceKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  33780. {
  33781. m_surfaceKHR = VK_NULL_HANDLE;
  33782. return *this;
  33783. }
  33784. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  33785. auto operator<=>( SurfaceKHR const& ) const = default;
  33786. #else
  33787. bool operator==( SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
  33788. {
  33789. return m_surfaceKHR == rhs.m_surfaceKHR;
  33790. }
  33791. bool operator!=(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
  33792. {
  33793. return m_surfaceKHR != rhs.m_surfaceKHR;
  33794. }
  33795. bool operator<(SurfaceKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
  33796. {
  33797. return m_surfaceKHR < rhs.m_surfaceKHR;
  33798. }
  33799. #endif
  33800. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkSurfaceKHR() const VULKAN_HPP_NOEXCEPT
  33801. {
  33802. return m_surfaceKHR;
  33803. }
  33804. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  33805. {
  33806. return m_surfaceKHR != VK_NULL_HANDLE;
  33807. }
  33808. bool operator!() const VULKAN_HPP_NOEXCEPT
  33809. {
  33810. return m_surfaceKHR == VK_NULL_HANDLE;
  33811. }
  33812. private:
  33813. VkSurfaceKHR m_surfaceKHR;
  33814. };
  33815. static_assert( sizeof( VULKAN_HPP_NAMESPACE::SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" );
  33816. template <>
  33817. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eSurfaceKHR>
  33818. {
  33819. using type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
  33820. };
  33821. template <>
  33822. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eSurfaceKHR>
  33823. {
  33824. using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
  33825. };
  33826. template <>
  33827. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eSurfaceKHR>
  33828. {
  33829. using Type = VULKAN_HPP_NAMESPACE::SurfaceKHR;
  33830. };
  33831. template <>
  33832. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::SurfaceKHR>
  33833. {
  33834. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  33835. };
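// Note: vk::SurfaceKHR has no device-level create info in this header; it is normally obtained
// from a platform-specific instance call (e.g. createWin32SurfaceKHR, createXlibSurfaceKHR) or
// from a windowing library such as GLFW, and is then consumed by SwapchainCreateInfoKHR below.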
  33836. struct SwapchainCreateInfoKHR
  33837. {
  33838. static const bool allowDuplicate = false;
  33839. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCreateInfoKHR;
  33840. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  33841. VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}, uint32_t minImageCount_ = {}, VULKAN_HPP_NAMESPACE::Format imageFormat_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}, uint32_t imageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ = {}, VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t* pQueueFamilyIndices_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {}) VULKAN_HPP_NOEXCEPT
  33842. : flags( flags_ ), surface( surface_ ), minImageCount( minImageCount_ ), imageFormat( imageFormat_ ), imageColorSpace( imageColorSpace_ ), imageExtent( imageExtent_ ), imageArrayLayers( imageArrayLayers_ ), imageUsage( imageUsage_ ), imageSharingMode( imageSharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ ), preTransform( preTransform_ ), compositeAlpha( compositeAlpha_ ), presentMode( presentMode_ ), clipped( clipped_ ), oldSwapchain( oldSwapchain_ )
  33843. {}
  33844. VULKAN_HPP_CONSTEXPR SwapchainCreateInfoKHR( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  33845. SwapchainCreateInfoKHR( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  33846. : SwapchainCreateInfoKHR( *reinterpret_cast<SwapchainCreateInfoKHR const *>( &rhs ) )
  33847. {}
  33848. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  33849. SwapchainCreateInfoKHR( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_, VULKAN_HPP_NAMESPACE::SurfaceKHR surface_, uint32_t minImageCount_, VULKAN_HPP_NAMESPACE::Format imageFormat_, VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_, uint32_t imageArrayLayers_, VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_, VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate, VULKAN_HPP_NAMESPACE::Bool32 clipped_ = {}, VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ = {} )
  33850. : flags( flags_ ), surface( surface_ ), minImageCount( minImageCount_ ), imageFormat( imageFormat_ ), imageColorSpace( imageColorSpace_ ), imageExtent( imageExtent_ ), imageArrayLayers( imageArrayLayers_ ), imageUsage( imageUsage_ ), imageSharingMode( imageSharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() ), preTransform( preTransform_ ), compositeAlpha( compositeAlpha_ ), presentMode( presentMode_ ), clipped( clipped_ ), oldSwapchain( oldSwapchain_ )
  33851. {}
  33852. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  33853. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  33854. VULKAN_HPP_CONSTEXPR_14 SwapchainCreateInfoKHR & operator=( SwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  33855. SwapchainCreateInfoKHR & operator=( VkSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  33856. {
  33857. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR const *>( &rhs );
  33858. return *this;
  33859. }
  33860. SwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  33861. {
  33862. pNext = pNext_;
  33863. return *this;
  33864. }
  33865. SwapchainCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
  33866. {
  33867. flags = flags_;
  33868. return *this;
  33869. }
  33870. SwapchainCreateInfoKHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
  33871. {
  33872. surface = surface_;
  33873. return *this;
  33874. }
  33875. SwapchainCreateInfoKHR & setMinImageCount( uint32_t minImageCount_ ) VULKAN_HPP_NOEXCEPT
  33876. {
  33877. minImageCount = minImageCount_;
  33878. return *this;
  33879. }
  33880. SwapchainCreateInfoKHR & setImageFormat( VULKAN_HPP_NAMESPACE::Format imageFormat_ ) VULKAN_HPP_NOEXCEPT
  33881. {
  33882. imageFormat = imageFormat_;
  33883. return *this;
  33884. }
  33885. SwapchainCreateInfoKHR & setImageColorSpace( VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace_ ) VULKAN_HPP_NOEXCEPT
  33886. {
  33887. imageColorSpace = imageColorSpace_;
  33888. return *this;
  33889. }
  33890. SwapchainCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
  33891. {
  33892. imageExtent = imageExtent_;
  33893. return *this;
  33894. }
  33895. SwapchainCreateInfoKHR & setImageArrayLayers( uint32_t imageArrayLayers_ ) VULKAN_HPP_NOEXCEPT
  33896. {
  33897. imageArrayLayers = imageArrayLayers_;
  33898. return *this;
  33899. }
  33900. SwapchainCreateInfoKHR & setImageUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage_ ) VULKAN_HPP_NOEXCEPT
  33901. {
  33902. imageUsage = imageUsage_;
  33903. return *this;
  33904. }
  33905. SwapchainCreateInfoKHR & setImageSharingMode( VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode_ ) VULKAN_HPP_NOEXCEPT
  33906. {
  33907. imageSharingMode = imageSharingMode_;
  33908. return *this;
  33909. }
  33910. SwapchainCreateInfoKHR & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
  33911. {
  33912. queueFamilyIndexCount = queueFamilyIndexCount_;
  33913. return *this;
  33914. }
  33915. SwapchainCreateInfoKHR & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
  33916. {
  33917. pQueueFamilyIndices = pQueueFamilyIndices_;
  33918. return *this;
  33919. }
  33920. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  33921. SwapchainCreateInfoKHR & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
  33922. {
  33923. queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
  33924. pQueueFamilyIndices = queueFamilyIndices_.data();
  33925. return *this;
  33926. }
  33927. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  33928. SwapchainCreateInfoKHR & setPreTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform_ ) VULKAN_HPP_NOEXCEPT
  33929. {
  33930. preTransform = preTransform_;
  33931. return *this;
  33932. }
  33933. SwapchainCreateInfoKHR & setCompositeAlpha( VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha_ ) VULKAN_HPP_NOEXCEPT
  33934. {
  33935. compositeAlpha = compositeAlpha_;
  33936. return *this;
  33937. }
  33938. SwapchainCreateInfoKHR & setPresentMode( VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode_ ) VULKAN_HPP_NOEXCEPT
  33939. {
  33940. presentMode = presentMode_;
  33941. return *this;
  33942. }
  33943. SwapchainCreateInfoKHR & setClipped( VULKAN_HPP_NAMESPACE::Bool32 clipped_ ) VULKAN_HPP_NOEXCEPT
  33944. {
  33945. clipped = clipped_;
  33946. return *this;
  33947. }
  33948. SwapchainCreateInfoKHR & setOldSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain_ ) VULKAN_HPP_NOEXCEPT
  33949. {
  33950. oldSwapchain = oldSwapchain_;
  33951. return *this;
  33952. }
  33953. operator VkSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  33954. {
  33955. return *reinterpret_cast<const VkSwapchainCreateInfoKHR*>( this );
  33956. }
  33957. operator VkSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
  33958. {
  33959. return *reinterpret_cast<VkSwapchainCreateInfoKHR*>( this );
  33960. }
  33961. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  33962. auto operator<=>( SwapchainCreateInfoKHR const& ) const = default;
  33963. #else
  33964. bool operator==( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  33965. {
  33966. return ( sType == rhs.sType )
  33967. && ( pNext == rhs.pNext )
  33968. && ( flags == rhs.flags )
  33969. && ( surface == rhs.surface )
  33970. && ( minImageCount == rhs.minImageCount )
  33971. && ( imageFormat == rhs.imageFormat )
  33972. && ( imageColorSpace == rhs.imageColorSpace )
  33973. && ( imageExtent == rhs.imageExtent )
  33974. && ( imageArrayLayers == rhs.imageArrayLayers )
  33975. && ( imageUsage == rhs.imageUsage )
  33976. && ( imageSharingMode == rhs.imageSharingMode )
  33977. && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
  33978. && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices )
  33979. && ( preTransform == rhs.preTransform )
  33980. && ( compositeAlpha == rhs.compositeAlpha )
  33981. && ( presentMode == rhs.presentMode )
  33982. && ( clipped == rhs.clipped )
  33983. && ( oldSwapchain == rhs.oldSwapchain );
  33984. }
  33985. bool operator!=( SwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  33986. {
  33987. return !operator==( rhs );
  33988. }
  33989. #endif
  33990. public:
  33991. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCreateInfoKHR;
  33992. const void* pNext = {};
  33993. VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR flags = {};
  33994. VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
  33995. uint32_t minImageCount = {};
  33996. VULKAN_HPP_NAMESPACE::Format imageFormat = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  33997. VULKAN_HPP_NAMESPACE::ColorSpaceKHR imageColorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
  33998. VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
  33999. uint32_t imageArrayLayers = {};
  34000. VULKAN_HPP_NAMESPACE::ImageUsageFlags imageUsage = {};
  34001. VULKAN_HPP_NAMESPACE::SharingMode imageSharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
  34002. uint32_t queueFamilyIndexCount = {};
  34003. const uint32_t* pQueueFamilyIndices = {};
  34004. VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR preTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
  34005. VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR compositeAlpha = VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR::eOpaque;
  34006. VULKAN_HPP_NAMESPACE::PresentModeKHR presentMode = VULKAN_HPP_NAMESPACE::PresentModeKHR::eImmediate;
  34007. VULKAN_HPP_NAMESPACE::Bool32 clipped = {};
  34008. VULKAN_HPP_NAMESPACE::SwapchainKHR oldSwapchain = {};
  34009. };
  34010. static_assert( sizeof( SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
  34011. static_assert( std::is_standard_layout<SwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  34012. template <>
  34013. struct CppType<StructureType, StructureType::eSwapchainCreateInfoKHR>
  34014. {
  34015. using Type = SwapchainCreateInfoKHR;
  34016. };
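// Usage sketch (illustrative only, not generated code): one plausible way to fill
// SwapchainCreateInfoKHR through the fluent setters above, using the conventional
// `vk::` namespace alias. `device`, `surface`, and `surfaceCaps` (a
// vk::SurfaceCapabilitiesKHR queried beforehand) are assumed to exist in the
// application; the format and present mode are placeholder choices.
//
//   vk::SwapchainCreateInfoKHR swapchainInfo = vk::SwapchainCreateInfoKHR()
//                                                .setSurface( surface )
//                                                .setMinImageCount( surfaceCaps.minImageCount + 1 )
//                                                .setImageFormat( vk::Format::eB8G8R8A8Srgb )
//                                                .setImageColorSpace( vk::ColorSpaceKHR::eSrgbNonlinear )
//                                                .setImageExtent( surfaceCaps.currentExtent )
//                                                .setImageArrayLayers( 1 )
//                                                .setImageUsage( vk::ImageUsageFlagBits::eColorAttachment )
//                                                .setImageSharingMode( vk::SharingMode::eExclusive )
//                                                .setPreTransform( surfaceCaps.currentTransform )
//                                                .setCompositeAlpha( vk::CompositeAlphaFlagBitsKHR::eOpaque )
//                                                .setPresentMode( vk::PresentModeKHR::eFifo )
//                                                .setClipped( VK_TRUE );
//   vk::SwapchainKHR swapchain = device.createSwapchainKHR( swapchainInfo );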
  34017. struct ValidationCacheCreateInfoEXT
  34018. {
  34019. static const bool allowDuplicate = false;
  34020. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationCacheCreateInfoEXT;
  34021. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34022. VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT(VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ = {}, size_t initialDataSize_ = {}, const void* pInitialData_ = {}) VULKAN_HPP_NOEXCEPT
  34023. : flags( flags_ ), initialDataSize( initialDataSize_ ), pInitialData( pInitialData_ )
  34024. {}
  34025. VULKAN_HPP_CONSTEXPR ValidationCacheCreateInfoEXT( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34026. ValidationCacheCreateInfoEXT( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  34027. : ValidationCacheCreateInfoEXT( *reinterpret_cast<ValidationCacheCreateInfoEXT const *>( &rhs ) )
  34028. {}
  34029. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34030. template <typename T>
  34031. ValidationCacheCreateInfoEXT( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ )
  34032. : flags( flags_ ), initialDataSize( initialData_.size() * sizeof(T) ), pInitialData( initialData_.data() )
  34033. {}
  34034. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34035. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34036. VULKAN_HPP_CONSTEXPR_14 ValidationCacheCreateInfoEXT & operator=( ValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34037. ValidationCacheCreateInfoEXT & operator=( VkValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  34038. {
  34039. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT const *>( &rhs );
  34040. return *this;
  34041. }
  34042. ValidationCacheCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  34043. {
  34044. pNext = pNext_;
  34045. return *this;
  34046. }
  34047. ValidationCacheCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
  34048. {
  34049. flags = flags_;
  34050. return *this;
  34051. }
  34052. ValidationCacheCreateInfoEXT & setInitialDataSize( size_t initialDataSize_ ) VULKAN_HPP_NOEXCEPT
  34053. {
  34054. initialDataSize = initialDataSize_;
  34055. return *this;
  34056. }
  34057. ValidationCacheCreateInfoEXT & setPInitialData( const void* pInitialData_ ) VULKAN_HPP_NOEXCEPT
  34058. {
  34059. pInitialData = pInitialData_;
  34060. return *this;
  34061. }
  34062. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34063. template <typename T>
  34064. ValidationCacheCreateInfoEXT & setInitialData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & initialData_ ) VULKAN_HPP_NOEXCEPT
  34065. {
  34066. initialDataSize = initialData_.size() * sizeof(T);
  34067. pInitialData = initialData_.data();
  34068. return *this;
  34069. }
  34070. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34071. operator VkValidationCacheCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  34072. {
  34073. return *reinterpret_cast<const VkValidationCacheCreateInfoEXT*>( this );
  34074. }
  34075. operator VkValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  34076. {
  34077. return *reinterpret_cast<VkValidationCacheCreateInfoEXT*>( this );
  34078. }
  34079. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  34080. auto operator<=>( ValidationCacheCreateInfoEXT const& ) const = default;
  34081. #else
  34082. bool operator==( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  34083. {
  34084. return ( sType == rhs.sType )
  34085. && ( pNext == rhs.pNext )
  34086. && ( flags == rhs.flags )
  34087. && ( initialDataSize == rhs.initialDataSize )
  34088. && ( pInitialData == rhs.pInitialData );
  34089. }
  34090. bool operator!=( ValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  34091. {
  34092. return !operator==( rhs );
  34093. }
  34094. #endif
  34095. public:
  34096. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationCacheCreateInfoEXT;
  34097. const void* pNext = {};
  34098. VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT flags = {};
  34099. size_t initialDataSize = {};
  34100. const void* pInitialData = {};
  34101. };
  34102. static_assert( sizeof( ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
  34103. static_assert( std::is_standard_layout<ValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  34104. template <>
  34105. struct CppType<StructureType, StructureType::eValidationCacheCreateInfoEXT>
  34106. {
  34107. using Type = ValidationCacheCreateInfoEXT;
  34108. };
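// Usage sketch (illustrative only, not generated code): priming a validation cache from
// previously saved bytes via the setters above. Requires the VK_EXT_validation_cache
// device extension; `device` and `cacheBytes` (a std::vector<uint8_t> loaded by the
// application) are assumptions.
//
//   vk::ValidationCacheCreateInfoEXT cacheInfo;
//   cacheInfo.setInitialDataSize( cacheBytes.size() )
//            .setPInitialData( cacheBytes.data() );
//   vk::ValidationCacheEXT validationCache = device.createValidationCacheEXT( cacheInfo );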
  34109. class ValidationCacheEXT
  34110. {
  34111. public:
  34112. using CType = VkValidationCacheEXT;
  34113. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT;
  34114. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT;
  34115. public:
  34116. VULKAN_HPP_CONSTEXPR ValidationCacheEXT() VULKAN_HPP_NOEXCEPT
  34117. : m_validationCacheEXT(VK_NULL_HANDLE)
  34118. {}
  34119. VULKAN_HPP_CONSTEXPR ValidationCacheEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  34120. : m_validationCacheEXT(VK_NULL_HANDLE)
  34121. {}
  34122. VULKAN_HPP_TYPESAFE_EXPLICIT ValidationCacheEXT( VkValidationCacheEXT validationCacheEXT ) VULKAN_HPP_NOEXCEPT
  34123. : m_validationCacheEXT( validationCacheEXT )
  34124. {}
  34125. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  34126. ValidationCacheEXT & operator=(VkValidationCacheEXT validationCacheEXT) VULKAN_HPP_NOEXCEPT
  34127. {
  34128. m_validationCacheEXT = validationCacheEXT;
  34129. return *this;
  34130. }
  34131. #endif
  34132. ValidationCacheEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  34133. {
  34134. m_validationCacheEXT = VK_NULL_HANDLE;
  34135. return *this;
  34136. }
  34137. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  34138. auto operator<=>( ValidationCacheEXT const& ) const = default;
  34139. #else
  34140. bool operator==( ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
  34141. {
  34142. return m_validationCacheEXT == rhs.m_validationCacheEXT;
  34143. }
  34144. bool operator!=(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
  34145. {
  34146. return m_validationCacheEXT != rhs.m_validationCacheEXT;
  34147. }
  34148. bool operator<(ValidationCacheEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
  34149. {
  34150. return m_validationCacheEXT < rhs.m_validationCacheEXT;
  34151. }
  34152. #endif
  34153. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkValidationCacheEXT() const VULKAN_HPP_NOEXCEPT
  34154. {
  34155. return m_validationCacheEXT;
  34156. }
  34157. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  34158. {
  34159. return m_validationCacheEXT != VK_NULL_HANDLE;
  34160. }
  34161. bool operator!() const VULKAN_HPP_NOEXCEPT
  34162. {
  34163. return m_validationCacheEXT == VK_NULL_HANDLE;
  34164. }
  34165. private:
  34166. VkValidationCacheEXT m_validationCacheEXT;
  34167. };
  34168. static_assert( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!" );
  34169. template <>
  34170. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eValidationCacheEXT>
  34171. {
  34172. using type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT;
  34173. };
  34174. template <>
  34175. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eValidationCacheEXT>
  34176. {
  34177. using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT;
  34178. };
  34179. template <>
  34180. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eValidationCacheEXT>
  34181. {
  34182. using Type = VULKAN_HPP_NAMESPACE::ValidationCacheEXT;
  34183. };
  34184. template <>
  34185. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>
  34186. {
  34187. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  34188. };
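// Handle-semantics sketch (illustrative only, not generated code): ValidationCacheEXT is
// a thin wrapper over VkValidationCacheEXT, so it can be tested for VK_NULL_HANDLE and
// cast back to the C handle when calling C APIs directly.
//
//   vk::ValidationCacheEXT cache;                 // default-constructed, equals VK_NULL_HANDLE
//   if ( !cache )
//   {
//     // not created yet
//   }
//   VkValidationCacheEXT rawCache = static_cast<VkValidationCacheEXT>( cache );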
  34189. struct DisplayPowerInfoEXT
  34190. {
  34191. static const bool allowDuplicate = false;
  34192. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPowerInfoEXT;
  34193. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34194. VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT(VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff) VULKAN_HPP_NOEXCEPT
  34195. : powerState( powerState_ )
  34196. {}
  34197. VULKAN_HPP_CONSTEXPR DisplayPowerInfoEXT( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34198. DisplayPowerInfoEXT( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  34199. : DisplayPowerInfoEXT( *reinterpret_cast<DisplayPowerInfoEXT const *>( &rhs ) )
  34200. {}
  34201. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34202. VULKAN_HPP_CONSTEXPR_14 DisplayPowerInfoEXT & operator=( DisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34203. DisplayPowerInfoEXT & operator=( VkDisplayPowerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  34204. {
  34205. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT const *>( &rhs );
  34206. return *this;
  34207. }
  34208. DisplayPowerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  34209. {
  34210. pNext = pNext_;
  34211. return *this;
  34212. }
  34213. DisplayPowerInfoEXT & setPowerState( VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState_ ) VULKAN_HPP_NOEXCEPT
  34214. {
  34215. powerState = powerState_;
  34216. return *this;
  34217. }
  34218. operator VkDisplayPowerInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  34219. {
  34220. return *reinterpret_cast<const VkDisplayPowerInfoEXT*>( this );
  34221. }
  34222. operator VkDisplayPowerInfoEXT &() VULKAN_HPP_NOEXCEPT
  34223. {
  34224. return *reinterpret_cast<VkDisplayPowerInfoEXT*>( this );
  34225. }
  34226. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  34227. auto operator<=>( DisplayPowerInfoEXT const& ) const = default;
  34228. #else
  34229. bool operator==( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  34230. {
  34231. return ( sType == rhs.sType )
  34232. && ( pNext == rhs.pNext )
  34233. && ( powerState == rhs.powerState );
  34234. }
  34235. bool operator!=( DisplayPowerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  34236. {
  34237. return !operator==( rhs );
  34238. }
  34239. #endif
  34240. public:
  34241. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPowerInfoEXT;
  34242. const void* pNext = {};
  34243. VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT powerState = VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT::eOff;
  34244. };
  34245. static_assert( sizeof( DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" );
  34246. static_assert( std::is_standard_layout<DisplayPowerInfoEXT>::value, "struct wrapper is not a standard layout!" );
  34247. template <>
  34248. struct CppType<StructureType, StructureType::eDisplayPowerInfoEXT>
  34249. {
  34250. using Type = DisplayPowerInfoEXT;
  34251. };
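// Usage sketch (illustrative only, not generated code): changing a display's power state
// with VK_EXT_display_control. `device` and `display` (a vk::DisplayKHR obtained from the
// physical device) are assumptions.
//
//   vk::DisplayPowerInfoEXT powerInfo( vk::DisplayPowerStateEXT::eSuspend );
//   device.displayPowerControlEXT( display, powerInfo );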
  34252. struct MappedMemoryRange
  34253. {
  34254. static const bool allowDuplicate = false;
  34255. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMappedMemoryRange;
  34256. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34257. VULKAN_HPP_CONSTEXPR MappedMemoryRange(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
  34258. : memory( memory_ ), offset( offset_ ), size( size_ )
  34259. {}
  34260. VULKAN_HPP_CONSTEXPR MappedMemoryRange( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34261. MappedMemoryRange( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
  34262. : MappedMemoryRange( *reinterpret_cast<MappedMemoryRange const *>( &rhs ) )
  34263. {}
  34264. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34265. VULKAN_HPP_CONSTEXPR_14 MappedMemoryRange & operator=( MappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34266. MappedMemoryRange & operator=( VkMappedMemoryRange const & rhs ) VULKAN_HPP_NOEXCEPT
  34267. {
  34268. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MappedMemoryRange const *>( &rhs );
  34269. return *this;
  34270. }
  34271. MappedMemoryRange & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  34272. {
  34273. pNext = pNext_;
  34274. return *this;
  34275. }
  34276. MappedMemoryRange & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
  34277. {
  34278. memory = memory_;
  34279. return *this;
  34280. }
  34281. MappedMemoryRange & setOffset( VULKAN_HPP_NAMESPACE::DeviceSize offset_ ) VULKAN_HPP_NOEXCEPT
  34282. {
  34283. offset = offset_;
  34284. return *this;
  34285. }
  34286. MappedMemoryRange & setSize( VULKAN_HPP_NAMESPACE::DeviceSize size_ ) VULKAN_HPP_NOEXCEPT
  34287. {
  34288. size = size_;
  34289. return *this;
  34290. }
  34291. operator VkMappedMemoryRange const&() const VULKAN_HPP_NOEXCEPT
  34292. {
  34293. return *reinterpret_cast<const VkMappedMemoryRange*>( this );
  34294. }
  34295. operator VkMappedMemoryRange &() VULKAN_HPP_NOEXCEPT
  34296. {
  34297. return *reinterpret_cast<VkMappedMemoryRange*>( this );
  34298. }
  34299. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  34300. auto operator<=>( MappedMemoryRange const& ) const = default;
  34301. #else
  34302. bool operator==( MappedMemoryRange const& rhs ) const VULKAN_HPP_NOEXCEPT
  34303. {
  34304. return ( sType == rhs.sType )
  34305. && ( pNext == rhs.pNext )
  34306. && ( memory == rhs.memory )
  34307. && ( offset == rhs.offset )
  34308. && ( size == rhs.size );
  34309. }
  34310. bool operator!=( MappedMemoryRange const& rhs ) const VULKAN_HPP_NOEXCEPT
  34311. {
  34312. return !operator==( rhs );
  34313. }
  34314. #endif
  34315. public:
  34316. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMappedMemoryRange;
  34317. const void* pNext = {};
  34318. VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
  34319. VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
  34320. VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  34321. };
  34322. static_assert( sizeof( MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" );
  34323. static_assert( std::is_standard_layout<MappedMemoryRange>::value, "struct wrapper is not a standard layout!" );
  34324. template <>
  34325. struct CppType<StructureType, StructureType::eMappedMemoryRange>
  34326. {
  34327. using Type = MappedMemoryRange;
  34328. };
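// Usage sketch (illustrative only, not generated code): flushing a host write to a
// non-coherent allocation. `device` and `memory` (a mapped, host-visible but not
// host-coherent vk::DeviceMemory) are assumptions.
//
//   vk::MappedMemoryRange range = vk::MappedMemoryRange()
//                                   .setMemory( memory )
//                                   .setOffset( 0 )
//                                   .setSize( VK_WHOLE_SIZE );
//   device.flushMappedMemoryRanges( range );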
  34329. struct MemoryRequirements
  34330. {
  34331. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34332. VULKAN_HPP_CONSTEXPR MemoryRequirements(VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize alignment_ = {}, uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
  34333. : size( size_ ), alignment( alignment_ ), memoryTypeBits( memoryTypeBits_ )
  34334. {}
  34335. VULKAN_HPP_CONSTEXPR MemoryRequirements( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34336. MemoryRequirements( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
  34337. : MemoryRequirements( *reinterpret_cast<MemoryRequirements const *>( &rhs ) )
  34338. {}
  34339. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34340. VULKAN_HPP_CONSTEXPR_14 MemoryRequirements & operator=( MemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34341. MemoryRequirements & operator=( VkMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
  34342. {
  34343. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements const *>( &rhs );
  34344. return *this;
  34345. }
  34346. operator VkMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT
  34347. {
  34348. return *reinterpret_cast<const VkMemoryRequirements*>( this );
  34349. }
  34350. operator VkMemoryRequirements &() VULKAN_HPP_NOEXCEPT
  34351. {
  34352. return *reinterpret_cast<VkMemoryRequirements*>( this );
  34353. }
  34354. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  34355. auto operator<=>( MemoryRequirements const& ) const = default;
  34356. #else
  34357. bool operator==( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
  34358. {
  34359. return ( size == rhs.size )
  34360. && ( alignment == rhs.alignment )
  34361. && ( memoryTypeBits == rhs.memoryTypeBits );
  34362. }
  34363. bool operator!=( MemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
  34364. {
  34365. return !operator==( rhs );
  34366. }
  34367. #endif
  34368. public:
  34369. VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  34370. VULKAN_HPP_NAMESPACE::DeviceSize alignment = {};
  34371. uint32_t memoryTypeBits = {};
  34372. };
  34373. static_assert( sizeof( MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
  34374. static_assert( std::is_standard_layout<MemoryRequirements>::value, "struct wrapper is not a standard layout!" );
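// Usage sketch (illustrative only, not generated code): picking a memory type index that
// satisfies memoryTypeBits and a desired property flag. `device`, `physicalDevice`, and
// `buffer` are assumptions.
//
//   vk::MemoryRequirements req = device.getBufferMemoryRequirements( buffer );
//   vk::PhysicalDeviceMemoryProperties memProps = physicalDevice.getMemoryProperties();
//   uint32_t typeIndex = uint32_t( ~0 );
//   for ( uint32_t i = 0; i < memProps.memoryTypeCount; ++i )
//   {
//     if ( ( req.memoryTypeBits & ( 1u << i ) ) &&
//          ( memProps.memoryTypes[i].propertyFlags & vk::MemoryPropertyFlagBits::eHostVisible ) )
//     {
//       typeIndex = i;
//       break;
//     }
//   }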
  34375. struct MemoryRequirements2
  34376. {
  34377. static const bool allowDuplicate = false;
  34378. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryRequirements2;
  34379. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34380. VULKAN_HPP_CONSTEXPR MemoryRequirements2(VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements_ = {}) VULKAN_HPP_NOEXCEPT
  34381. : memoryRequirements( memoryRequirements_ )
  34382. {}
  34383. VULKAN_HPP_CONSTEXPR MemoryRequirements2( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34384. MemoryRequirements2( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
  34385. : MemoryRequirements2( *reinterpret_cast<MemoryRequirements2 const *>( &rhs ) )
  34386. {}
  34387. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34388. VULKAN_HPP_CONSTEXPR_14 MemoryRequirements2 & operator=( MemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34389. MemoryRequirements2 & operator=( VkMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
  34390. {
  34391. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryRequirements2 const *>( &rhs );
  34392. return *this;
  34393. }
  34394. operator VkMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT
  34395. {
  34396. return *reinterpret_cast<const VkMemoryRequirements2*>( this );
  34397. }
  34398. operator VkMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
  34399. {
  34400. return *reinterpret_cast<VkMemoryRequirements2*>( this );
  34401. }
  34402. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  34403. auto operator<=>( MemoryRequirements2 const& ) const = default;
  34404. #else
  34405. bool operator==( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  34406. {
  34407. return ( sType == rhs.sType )
  34408. && ( pNext == rhs.pNext )
  34409. && ( memoryRequirements == rhs.memoryRequirements );
  34410. }
  34411. bool operator!=( MemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  34412. {
  34413. return !operator==( rhs );
  34414. }
  34415. #endif
  34416. public:
  34417. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryRequirements2;
  34418. void* pNext = {};
  34419. VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements = {};
  34420. };
  34421. static_assert( sizeof( MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" );
  34422. static_assert( std::is_standard_layout<MemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
  34423. template <>
  34424. struct CppType<StructureType, StructureType::eMemoryRequirements2>
  34425. {
  34426. using Type = MemoryRequirements2;
  34427. };
  34428. using MemoryRequirements2KHR = MemoryRequirements2;
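// Usage sketch (illustrative only, not generated code): the "2" variant wraps
// MemoryRequirements in an extensible struct so additional requirement structures can be
// chained through pNext. `device` and `buffer` are assumptions.
//
//   vk::BufferMemoryRequirementsInfo2 info( buffer );
//   vk::MemoryRequirements2 req2 = device.getBufferMemoryRequirements2( info );
//   vk::DeviceSize allocationSize = req2.memoryRequirements.size;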
  34429. struct DeviceGroupPresentCapabilitiesKHR
  34430. {
  34431. static const bool allowDuplicate = false;
  34432. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
  34433. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34434. VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR(std::array<uint32_t,VK_MAX_DEVICE_GROUP_SIZE> const& presentMask_ = {}, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}) VULKAN_HPP_NOEXCEPT
  34435. : presentMask( presentMask_ ), modes( modes_ )
  34436. {}
  34437. VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34438. DeviceGroupPresentCapabilitiesKHR( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  34439. : DeviceGroupPresentCapabilitiesKHR( *reinterpret_cast<DeviceGroupPresentCapabilitiesKHR const *>( &rhs ) )
  34440. {}
  34441. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34442. VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentCapabilitiesKHR & operator=( DeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34443. DeviceGroupPresentCapabilitiesKHR & operator=( VkDeviceGroupPresentCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  34444. {
  34445. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR const *>( &rhs );
  34446. return *this;
  34447. }
  34448. operator VkDeviceGroupPresentCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
  34449. {
  34450. return *reinterpret_cast<const VkDeviceGroupPresentCapabilitiesKHR*>( this );
  34451. }
  34452. operator VkDeviceGroupPresentCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
  34453. {
  34454. return *reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR*>( this );
  34455. }
  34456. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  34457. auto operator<=>( DeviceGroupPresentCapabilitiesKHR const& ) const = default;
  34458. #else
  34459. bool operator==( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  34460. {
  34461. return ( sType == rhs.sType )
  34462. && ( pNext == rhs.pNext )
  34463. && ( presentMask == rhs.presentMask )
  34464. && ( modes == rhs.modes );
  34465. }
  34466. bool operator!=( DeviceGroupPresentCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  34467. {
  34468. return !operator==( rhs );
  34469. }
  34470. #endif
  34471. public:
  34472. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentCapabilitiesKHR;
  34473. const void* pNext = {};
  34474. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, VK_MAX_DEVICE_GROUP_SIZE> presentMask = {};
  34475. VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
  34476. };
  34477. static_assert( sizeof( DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ), "struct and wrapper have different size!" );
  34478. static_assert( std::is_standard_layout<DeviceGroupPresentCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
  34479. template <>
  34480. struct CppType<StructureType, StructureType::eDeviceGroupPresentCapabilitiesKHR>
  34481. {
  34482. using Type = DeviceGroupPresentCapabilitiesKHR;
  34483. };
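// Usage sketch (illustrative only, not generated code): querying which physical devices
// of a device group can present, as exposed through presentMask above. Requires a device
// created with device-group support; `device` is an assumption.
//
//   vk::DeviceGroupPresentCapabilitiesKHR presentCaps = device.getGroupPresentCapabilitiesKHR();
//   for ( uint32_t i = 0; i < VK_MAX_DEVICE_GROUP_SIZE; ++i )
//   {
//     if ( presentCaps.presentMask[i] != 0 )
//     {
//       // physical device i can present images owned by the devices set in presentMask[i]
//     }
//   }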
  34484. struct PhysicalDeviceSurfaceInfo2KHR
  34485. {
  34486. static const bool allowDuplicate = false;
  34487. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
  34488. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34489. VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR(VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ = {}) VULKAN_HPP_NOEXCEPT
  34490. : surface( surface_ )
  34491. {}
  34492. VULKAN_HPP_CONSTEXPR PhysicalDeviceSurfaceInfo2KHR( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34493. PhysicalDeviceSurfaceInfo2KHR( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  34494. : PhysicalDeviceSurfaceInfo2KHR( *reinterpret_cast<PhysicalDeviceSurfaceInfo2KHR const *>( &rhs ) )
  34495. {}
  34496. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34497. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSurfaceInfo2KHR & operator=( PhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34498. PhysicalDeviceSurfaceInfo2KHR & operator=( VkPhysicalDeviceSurfaceInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  34499. {
  34500. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR const *>( &rhs );
  34501. return *this;
  34502. }
  34503. PhysicalDeviceSurfaceInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  34504. {
  34505. pNext = pNext_;
  34506. return *this;
  34507. }
  34508. PhysicalDeviceSurfaceInfo2KHR & setSurface( VULKAN_HPP_NAMESPACE::SurfaceKHR surface_ ) VULKAN_HPP_NOEXCEPT
  34509. {
  34510. surface = surface_;
  34511. return *this;
  34512. }
  34513. operator VkPhysicalDeviceSurfaceInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
  34514. {
  34515. return *reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR*>( this );
  34516. }
  34517. operator VkPhysicalDeviceSurfaceInfo2KHR &() VULKAN_HPP_NOEXCEPT
  34518. {
  34519. return *reinterpret_cast<VkPhysicalDeviceSurfaceInfo2KHR*>( this );
  34520. }
  34521. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  34522. auto operator<=>( PhysicalDeviceSurfaceInfo2KHR const& ) const = default;
  34523. #else
  34524. bool operator==( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  34525. {
  34526. return ( sType == rhs.sType )
  34527. && ( pNext == rhs.pNext )
  34528. && ( surface == rhs.surface );
  34529. }
  34530. bool operator!=( PhysicalDeviceSurfaceInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  34531. {
  34532. return !operator==( rhs );
  34533. }
  34534. #endif
  34535. public:
  34536. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSurfaceInfo2KHR;
  34537. const void* pNext = {};
  34538. VULKAN_HPP_NAMESPACE::SurfaceKHR surface = {};
  34539. };
  34540. static_assert( sizeof( PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ), "struct and wrapper have different size!" );
  34541. static_assert( std::is_standard_layout<PhysicalDeviceSurfaceInfo2KHR>::value, "struct wrapper is not a standard layout!" );
  34542. template <>
  34543. struct CppType<StructureType, StructureType::ePhysicalDeviceSurfaceInfo2KHR>
  34544. {
  34545. using Type = PhysicalDeviceSurfaceInfo2KHR;
  34546. };
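// Usage sketch (illustrative only, not generated code): the extensible surface query
// from VK_KHR_get_surface_capabilities2. `physicalDevice` and `surface` are assumptions.
//
//   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface );
//   vk::SurfaceCapabilities2KHR caps2 = physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo );
//   uint32_t minImages = caps2.surfaceCapabilities.minImageCount;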
  34547. struct DeviceMemoryOpaqueCaptureAddressInfo
  34548. {
  34549. static const bool allowDuplicate = false;
  34550. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
  34551. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34552. VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}) VULKAN_HPP_NOEXCEPT
  34553. : memory( memory_ )
  34554. {}
  34555. VULKAN_HPP_CONSTEXPR DeviceMemoryOpaqueCaptureAddressInfo( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34556. DeviceMemoryOpaqueCaptureAddressInfo( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  34557. : DeviceMemoryOpaqueCaptureAddressInfo( *reinterpret_cast<DeviceMemoryOpaqueCaptureAddressInfo const *>( &rhs ) )
  34558. {}
  34559. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34560. VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOpaqueCaptureAddressInfo & operator=( DeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34561. DeviceMemoryOpaqueCaptureAddressInfo & operator=( VkDeviceMemoryOpaqueCaptureAddressInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  34562. {
  34563. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo const *>( &rhs );
  34564. return *this;
  34565. }
  34566. DeviceMemoryOpaqueCaptureAddressInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  34567. {
  34568. pNext = pNext_;
  34569. return *this;
  34570. }
  34571. DeviceMemoryOpaqueCaptureAddressInfo & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
  34572. {
  34573. memory = memory_;
  34574. return *this;
  34575. }
  34576. operator VkDeviceMemoryOpaqueCaptureAddressInfo const&() const VULKAN_HPP_NOEXCEPT
  34577. {
  34578. return *reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo*>( this );
  34579. }
  34580. operator VkDeviceMemoryOpaqueCaptureAddressInfo &() VULKAN_HPP_NOEXCEPT
  34581. {
  34582. return *reinterpret_cast<VkDeviceMemoryOpaqueCaptureAddressInfo*>( this );
  34583. }
  34584. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  34585. auto operator<=>( DeviceMemoryOpaqueCaptureAddressInfo const& ) const = default;
  34586. #else
  34587. bool operator==( DeviceMemoryOpaqueCaptureAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  34588. {
  34589. return ( sType == rhs.sType )
  34590. && ( pNext == rhs.pNext )
  34591. && ( memory == rhs.memory );
  34592. }
  34593. bool operator!=( DeviceMemoryOpaqueCaptureAddressInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  34594. {
  34595. return !operator==( rhs );
  34596. }
  34597. #endif
  34598. public:
  34599. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOpaqueCaptureAddressInfo;
  34600. const void* pNext = {};
  34601. VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
  34602. };
  34603. static_assert( sizeof( DeviceMemoryOpaqueCaptureAddressInfo ) == sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ), "struct and wrapper have different size!" );
  34604. static_assert( std::is_standard_layout<DeviceMemoryOpaqueCaptureAddressInfo>::value, "struct wrapper is not a standard layout!" );
  34605. template <>
  34606. struct CppType<StructureType, StructureType::eDeviceMemoryOpaqueCaptureAddressInfo>
  34607. {
  34608. using Type = DeviceMemoryOpaqueCaptureAddressInfo;
  34609. };
  34610. using DeviceMemoryOpaqueCaptureAddressInfoKHR = DeviceMemoryOpaqueCaptureAddressInfo;
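// Usage sketch (illustrative only, not generated code): retrieving an opaque capture
// address for a memory allocation (Vulkan 1.2, typically used with the capture/replay
// variants of the buffer-device-address feature). `device` and `memory` are assumptions.
//
//   vk::DeviceMemoryOpaqueCaptureAddressInfo addressInfo( memory );
//   uint64_t captureAddress = device.getMemoryOpaqueCaptureAddress( addressInfo );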
  34611. struct PresentInfoKHR
  34612. {
  34613. static const bool allowDuplicate = false;
  34614. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentInfoKHR;
  34615. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34616. VULKAN_HPP_CONSTEXPR PresentInfoKHR(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ = {}, const uint32_t* pImageIndices_ = {}, VULKAN_HPP_NAMESPACE::Result* pResults_ = {}) VULKAN_HPP_NOEXCEPT
  34617. : waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphores( pWaitSemaphores_ ), swapchainCount( swapchainCount_ ), pSwapchains( pSwapchains_ ), pImageIndices( pImageIndices_ ), pResults( pResults_ )
  34618. {}
  34619. VULKAN_HPP_CONSTEXPR PresentInfoKHR( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34620. PresentInfoKHR( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  34621. : PresentInfoKHR( *reinterpret_cast<PresentInfoKHR const *>( &rhs ) )
  34622. {}
  34623. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34624. PresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_ = {} )
  34625. : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), swapchainCount( static_cast<uint32_t>( swapchains_.size() ) ), pSwapchains( swapchains_.data() ), pImageIndices( imageIndices_.data() ), pResults( results_.data() )
  34626. {
  34627. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  34628. VULKAN_HPP_ASSERT( swapchains_.size() == imageIndices_.size() );
  34629. VULKAN_HPP_ASSERT( results_.empty() || ( swapchains_.size() == results_.size() ) );
  34630. VULKAN_HPP_ASSERT( results_.empty() || ( imageIndices_.size() == results_.size() ) );
  34631. #else
  34632. if ( swapchains_.size() != imageIndices_.size() )
  34633. {
  34634. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: swapchains_.size() != imageIndices_.size()" );
  34635. }
  34636. if ( !results_.empty() && ( swapchains_.size() != results_.size() ) )
  34637. {
  34638. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( swapchains_.size() != results_.size() )" );
  34639. }
  34640. if ( !results_.empty() && ( imageIndices_.size() != results_.size() ) )
  34641. {
  34642. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::PresentInfoKHR::PresentInfoKHR: !results_.empty() && ( imageIndices_.size() != results_.size() )" );
  34643. }
  34644. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  34645. }
  34646. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34647. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34648. VULKAN_HPP_CONSTEXPR_14 PresentInfoKHR & operator=( PresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34649. PresentInfoKHR & operator=( VkPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  34650. {
  34651. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentInfoKHR const *>( &rhs );
  34652. return *this;
  34653. }
  34654. PresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  34655. {
  34656. pNext = pNext_;
  34657. return *this;
  34658. }
  34659. PresentInfoKHR & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
  34660. {
  34661. waitSemaphoreCount = waitSemaphoreCount_;
  34662. return *this;
  34663. }
  34664. PresentInfoKHR & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
  34665. {
  34666. pWaitSemaphores = pWaitSemaphores_;
  34667. return *this;
  34668. }
  34669. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34670. PresentInfoKHR & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
  34671. {
  34672. waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
  34673. pWaitSemaphores = waitSemaphores_.data();
  34674. return *this;
  34675. }
  34676. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34677. PresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
  34678. {
  34679. swapchainCount = swapchainCount_;
  34680. return *this;
  34681. }
  34682. PresentInfoKHR & setPSwapchains( const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains_ ) VULKAN_HPP_NOEXCEPT
  34683. {
  34684. pSwapchains = pSwapchains_;
  34685. return *this;
  34686. }
  34687. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34688. PresentInfoKHR & setSwapchains( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains_ ) VULKAN_HPP_NOEXCEPT
  34689. {
  34690. swapchainCount = static_cast<uint32_t>( swapchains_.size() );
  34691. pSwapchains = swapchains_.data();
  34692. return *this;
  34693. }
  34694. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34695. PresentInfoKHR & setPImageIndices( const uint32_t* pImageIndices_ ) VULKAN_HPP_NOEXCEPT
  34696. {
  34697. pImageIndices = pImageIndices_;
  34698. return *this;
  34699. }
  34700. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34701. PresentInfoKHR & setImageIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & imageIndices_ ) VULKAN_HPP_NOEXCEPT
  34702. {
  34703. swapchainCount = static_cast<uint32_t>( imageIndices_.size() );
  34704. pImageIndices = imageIndices_.data();
  34705. return *this;
  34706. }
  34707. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34708. PresentInfoKHR & setPResults( VULKAN_HPP_NAMESPACE::Result* pResults_ ) VULKAN_HPP_NOEXCEPT
  34709. {
  34710. pResults = pResults_;
  34711. return *this;
  34712. }
  34713. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34714. PresentInfoKHR & setResults( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::Result> const & results_ ) VULKAN_HPP_NOEXCEPT
  34715. {
  34716. swapchainCount = static_cast<uint32_t>( results_.size() );
  34717. pResults = results_.data();
  34718. return *this;
  34719. }
  34720. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34721. operator VkPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  34722. {
  34723. return *reinterpret_cast<const VkPresentInfoKHR*>( this );
  34724. }
  34725. operator VkPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
  34726. {
  34727. return *reinterpret_cast<VkPresentInfoKHR*>( this );
  34728. }
  34729. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  34730. auto operator<=>( PresentInfoKHR const& ) const = default;
  34731. #else
  34732. bool operator==( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  34733. {
  34734. return ( sType == rhs.sType )
  34735. && ( pNext == rhs.pNext )
  34736. && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
  34737. && ( pWaitSemaphores == rhs.pWaitSemaphores )
  34738. && ( swapchainCount == rhs.swapchainCount )
  34739. && ( pSwapchains == rhs.pSwapchains )
  34740. && ( pImageIndices == rhs.pImageIndices )
  34741. && ( pResults == rhs.pResults );
  34742. }
  34743. bool operator!=( PresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  34744. {
  34745. return !operator==( rhs );
  34746. }
  34747. #endif
  34748. public:
  34749. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentInfoKHR;
  34750. const void* pNext = {};
  34751. uint32_t waitSemaphoreCount = {};
  34752. const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {};
  34753. uint32_t swapchainCount = {};
  34754. const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains = {};
  34755. const uint32_t* pImageIndices = {};
  34756. VULKAN_HPP_NAMESPACE::Result* pResults = {};
  34757. };
  34758. static_assert( sizeof( PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
  34759. static_assert( std::is_standard_layout<PresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
  34760. template <>
  34761. struct CppType<StructureType, StructureType::ePresentInfoKHR>
  34762. {
  34763. using Type = PresentInfoKHR;
  34764. };
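// Usage sketch (illustrative only, not generated code): presenting a single swapchain
// image. The proxy-based setters accept a single lvalue as a one-element range, and the
// referenced handles and indices must outlive the call. `presentQueue`, `renderFinished`,
// `swapchain`, and `imageIndex` are assumptions.
//
//   vk::PresentInfoKHR presentInfo = vk::PresentInfoKHR()
//                                      .setWaitSemaphores( renderFinished )
//                                      .setSwapchains( swapchain )
//                                      .setImageIndices( imageIndex );
//   vk::Result presentResult = presentQueue.presentKHR( presentInfo );
//   // eSuboptimalKHR is a success code and should be checked alongside eSuccess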
  34765. struct SubmitInfo
  34766. {
  34767. static const bool allowDuplicate = false;
  34768. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubmitInfo;
  34769. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34770. VULKAN_HPP_CONSTEXPR SubmitInfo(uint32_t waitSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ = {}, const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ = {}, uint32_t commandBufferCount_ = {}, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ = {}, uint32_t signalSemaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ = {}) VULKAN_HPP_NOEXCEPT
  34771. : waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphores( pWaitSemaphores_ ), pWaitDstStageMask( pWaitDstStageMask_ ), commandBufferCount( commandBufferCount_ ), pCommandBuffers( pCommandBuffers_ ), signalSemaphoreCount( signalSemaphoreCount_ ), pSignalSemaphores( pSignalSemaphores_ )
  34772. {}
  34773. VULKAN_HPP_CONSTEXPR SubmitInfo( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34774. SubmitInfo( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  34775. : SubmitInfo( *reinterpret_cast<SubmitInfo const *>( &rhs ) )
  34776. {}
  34777. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34778. SubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const & waitDstStageMask_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ = {} )
  34779. : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphores_.size() ) ), pWaitSemaphores( waitSemaphores_.data() ), pWaitDstStageMask( waitDstStageMask_.data() ), commandBufferCount( static_cast<uint32_t>( commandBuffers_.size() ) ), pCommandBuffers( commandBuffers_.data() ), signalSemaphoreCount( static_cast<uint32_t>( signalSemaphores_.size() ) ), pSignalSemaphores( signalSemaphores_.data() )
  34780. {
  34781. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  34782. VULKAN_HPP_ASSERT( waitSemaphores_.size() == waitDstStageMask_.size() );
  34783. #else
  34784. if ( waitSemaphores_.size() != waitDstStageMask_.size() )
  34785. {
  34786. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SubmitInfo::SubmitInfo: waitSemaphores_.size() != waitDstStageMask_.size()" );
  34787. }
  34788. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  34789. }
  34790. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34791. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  34792. VULKAN_HPP_CONSTEXPR_14 SubmitInfo & operator=( SubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  34793. SubmitInfo & operator=( VkSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  34794. {
  34795. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubmitInfo const *>( &rhs );
  34796. return *this;
  34797. }
  34798. SubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  34799. {
  34800. pNext = pNext_;
  34801. return *this;
  34802. }
  34803. SubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
  34804. {
  34805. waitSemaphoreCount = waitSemaphoreCount_;
  34806. return *this;
  34807. }
  34808. SubmitInfo & setPWaitSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores_ ) VULKAN_HPP_NOEXCEPT
  34809. {
  34810. pWaitSemaphores = pWaitSemaphores_;
  34811. return *this;
  34812. }
  34813. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34814. SubmitInfo & setWaitSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & waitSemaphores_ ) VULKAN_HPP_NOEXCEPT
  34815. {
  34816. waitSemaphoreCount = static_cast<uint32_t>( waitSemaphores_.size() );
  34817. pWaitSemaphores = waitSemaphores_.data();
  34818. return *this;
  34819. }
  34820. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34821. SubmitInfo & setPWaitDstStageMask( const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
  34822. {
  34823. pWaitDstStageMask = pWaitDstStageMask_;
  34824. return *this;
  34825. }
  34826. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34827. SubmitInfo & setWaitDstStageMask( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineStageFlags> const & waitDstStageMask_ ) VULKAN_HPP_NOEXCEPT
  34828. {
  34829. waitSemaphoreCount = static_cast<uint32_t>( waitDstStageMask_.size() );
  34830. pWaitDstStageMask = waitDstStageMask_.data();
  34831. return *this;
  34832. }
  34833. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34834. SubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
  34835. {
  34836. commandBufferCount = commandBufferCount_;
  34837. return *this;
  34838. }
  34839. SubmitInfo & setPCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers_ ) VULKAN_HPP_NOEXCEPT
  34840. {
  34841. pCommandBuffers = pCommandBuffers_;
  34842. return *this;
  34843. }
  34844. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34845. SubmitInfo & setCommandBuffers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers_ ) VULKAN_HPP_NOEXCEPT
  34846. {
  34847. commandBufferCount = static_cast<uint32_t>( commandBuffers_.size() );
  34848. pCommandBuffers = commandBuffers_.data();
  34849. return *this;
  34850. }
  34851. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34852. SubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
  34853. {
  34854. signalSemaphoreCount = signalSemaphoreCount_;
  34855. return *this;
  34856. }
  34857. SubmitInfo & setPSignalSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores_ ) VULKAN_HPP_NOEXCEPT
  34858. {
  34859. pSignalSemaphores = pSignalSemaphores_;
  34860. return *this;
  34861. }
  34862. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34863. SubmitInfo & setSignalSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & signalSemaphores_ ) VULKAN_HPP_NOEXCEPT
  34864. {
  34865. signalSemaphoreCount = static_cast<uint32_t>( signalSemaphores_.size() );
  34866. pSignalSemaphores = signalSemaphores_.data();
  34867. return *this;
  34868. }
  34869. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  34870. operator VkSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
  34871. {
  34872. return *reinterpret_cast<const VkSubmitInfo*>( this );
  34873. }
  34874. operator VkSubmitInfo &() VULKAN_HPP_NOEXCEPT
  34875. {
  34876. return *reinterpret_cast<VkSubmitInfo*>( this );
  34877. }
  34878. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  34879. auto operator<=>( SubmitInfo const& ) const = default;
  34880. #else
  34881. bool operator==( SubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  34882. {
  34883. return ( sType == rhs.sType )
  34884. && ( pNext == rhs.pNext )
  34885. && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
  34886. && ( pWaitSemaphores == rhs.pWaitSemaphores )
  34887. && ( pWaitDstStageMask == rhs.pWaitDstStageMask )
  34888. && ( commandBufferCount == rhs.commandBufferCount )
  34889. && ( pCommandBuffers == rhs.pCommandBuffers )
  34890. && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
  34891. && ( pSignalSemaphores == rhs.pSignalSemaphores );
  34892. }
  34893. bool operator!=( SubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  34894. {
  34895. return !operator==( rhs );
  34896. }
  34897. #endif
  34898. public:
  34899. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubmitInfo;
  34900. const void* pNext = {};
  34901. uint32_t waitSemaphoreCount = {};
  34902. const VULKAN_HPP_NAMESPACE::Semaphore* pWaitSemaphores = {};
  34903. const VULKAN_HPP_NAMESPACE::PipelineStageFlags* pWaitDstStageMask = {};
  34904. uint32_t commandBufferCount = {};
  34905. const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers = {};
  34906. uint32_t signalSemaphoreCount = {};
  34907. const VULKAN_HPP_NAMESPACE::Semaphore* pSignalSemaphores = {};
  34908. };
  34909. static_assert( sizeof( SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
  34910. static_assert( std::is_standard_layout<SubmitInfo>::value, "struct wrapper is not a standard layout!" );
  34911. template <>
  34912. struct CppType<StructureType, StructureType::eSubmitInfo>
  34913. {
  34914. using Type = SubmitInfo;
  34915. };
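// Usage sketch (illustrative only, not generated code): submitting one command buffer
// that waits on an acquire semaphore and signals a render-finished semaphore. Single
// lvalues are passed as one-element proxies; `graphicsQueue`, `commandBuffer`,
// `imageAvailable`, `renderFinished`, and `inFlightFence` are assumptions.
//
//   vk::PipelineStageFlags waitStage = vk::PipelineStageFlagBits::eColorAttachmentOutput;
//   vk::SubmitInfo submitInfo = vk::SubmitInfo()
//                                 .setWaitSemaphores( imageAvailable )
//                                 .setWaitDstStageMask( waitStage )
//                                 .setCommandBuffers( commandBuffer )
//                                 .setSignalSemaphores( renderFinished );
//   graphicsQueue.submit( submitInfo, inFlightFence );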
  34916. class Queue
  34917. {
  34918. public:
  34919. using CType = VkQueue;
  34920. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eQueue;
  34921. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue;
  34922. public:
  34923. VULKAN_HPP_CONSTEXPR Queue() VULKAN_HPP_NOEXCEPT
  34924. : m_queue(VK_NULL_HANDLE)
  34925. {}
  34926. VULKAN_HPP_CONSTEXPR Queue( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  34927. : m_queue(VK_NULL_HANDLE)
  34928. {}
  34929. VULKAN_HPP_TYPESAFE_EXPLICIT Queue( VkQueue queue ) VULKAN_HPP_NOEXCEPT
  34930. : m_queue( queue )
  34931. {}
  34932. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  34933. Queue & operator=(VkQueue queue) VULKAN_HPP_NOEXCEPT
  34934. {
  34935. m_queue = queue;
  34936. return *this;
  34937. }
  34938. #endif
  34939. Queue & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  34940. {
  34941. m_queue = VK_NULL_HANDLE;
  34942. return *this;
  34943. }
  34944. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  34945. auto operator<=>( Queue const& ) const = default;
  34946. #else
  34947. bool operator==( Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
  34948. {
  34949. return m_queue == rhs.m_queue;
  34950. }
  34951. bool operator!=(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
  34952. {
  34953. return m_queue != rhs.m_queue;
  34954. }
  34955. bool operator<(Queue const & rhs ) const VULKAN_HPP_NOEXCEPT
  34956. {
  34957. return m_queue < rhs.m_queue;
  34958. }
  34959. #endif
  34960. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  34961. void getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  34962. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  34963. template <typename CheckpointDataNVAllocator = std::allocator<CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  34964. VULKAN_HPP_NODISCARD std::vector<CheckpointDataNV, CheckpointDataNVAllocator> getCheckpointDataNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  34965. template <typename CheckpointDataNVAllocator = std::allocator<CheckpointDataNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = CheckpointDataNVAllocator, typename std::enable_if<std::is_same<typename B::value_type, CheckpointDataNV>::value, int>::type = 0>
  34966. VULKAN_HPP_NODISCARD std::vector<CheckpointDataNV, CheckpointDataNVAllocator> getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  34967. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  34968. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  34969. void beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  34970. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  34971. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  34972. void beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  34973. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  34974. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  34975. VULKAN_HPP_NODISCARD Result bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  34976. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  34977. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  34978. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  34979. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  34980. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  34981. void endDebugUtilsLabelEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  34982. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  34983. void insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  34984. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  34985. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  34986. void insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  34987. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  34988. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  34989. VULKAN_HPP_NODISCARD Result presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  34990. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  34991. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  34992. VULKAN_HPP_NODISCARD Result presentKHR( const PresentInfoKHR & presentInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  34993. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  34994. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  34995. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  34996. VULKAN_HPP_NODISCARD Result setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  34997. #else
  34998. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  34999. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  35000. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  35001. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  35002. VULKAN_HPP_NODISCARD Result submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  35003. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  35004. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  35005. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  35006. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  35007. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  35008. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  35009. VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  35010. #else
  35011. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  35012. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  35013. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  35014. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkQueue() const VULKAN_HPP_NOEXCEPT
  35015. {
  35016. return m_queue;
  35017. }
  35018. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  35019. {
  35020. return m_queue != VK_NULL_HANDLE;
  35021. }
  35022. bool operator!() const VULKAN_HPP_NOEXCEPT
  35023. {
  35024. return m_queue == VK_NULL_HANDLE;
  35025. }
  35026. private:
  35027. VkQueue m_queue;
  35028. };
  35029. static_assert( sizeof( VULKAN_HPP_NAMESPACE::Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
  35030. template <>
  35031. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eQueue>
  35032. {
  35033. using type = VULKAN_HPP_NAMESPACE::Queue;
  35034. };
  35035. template <>
  35036. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eQueue>
  35037. {
  35038. using Type = VULKAN_HPP_NAMESPACE::Queue;
  35039. };
  35040. template <>
  35041. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eQueue>
  35042. {
  35043. using Type = VULKAN_HPP_NAMESPACE::Queue;
  35044. };
  35045. template <>
  35046. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Queue>
  35047. {
  35048. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  35049. };
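// Usage sketch (illustrative comment only; assumes a vk::Device `device` created with a
// queue from family `graphicsFamily`, plus `submitInfo`, `inFlightFence` and `presentInfo`
// prepared elsewhere):
//   vk::Queue graphicsQueue = device.getQueue( graphicsFamily, 0 );
//   graphicsQueue.submit( submitInfo, inFlightFence );                   // throws on failure in enhanced mode
//   vk::Result presentResult = graphicsQueue.presentKHR( presentInfo );  // may return eSuboptimalKHR
//   graphicsQueue.waitIdle();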
  35050. struct DeviceQueueInfo2
  35051. {
  35052. static const bool allowDuplicate = false;
  35053. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueInfo2;
  35054. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35055. VULKAN_HPP_CONSTEXPR DeviceQueueInfo2(VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ = {}, uint32_t queueFamilyIndex_ = {}, uint32_t queueIndex_ = {}) VULKAN_HPP_NOEXCEPT
  35056. : flags( flags_ ), queueFamilyIndex( queueFamilyIndex_ ), queueIndex( queueIndex_ )
  35057. {}
  35058. VULKAN_HPP_CONSTEXPR DeviceQueueInfo2( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35059. DeviceQueueInfo2( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
  35060. : DeviceQueueInfo2( *reinterpret_cast<DeviceQueueInfo2 const *>( &rhs ) )
  35061. {}
  35062. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35063. VULKAN_HPP_CONSTEXPR_14 DeviceQueueInfo2 & operator=( DeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35064. DeviceQueueInfo2 & operator=( VkDeviceQueueInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
  35065. {
  35066. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 const *>( &rhs );
  35067. return *this;
  35068. }
  35069. DeviceQueueInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  35070. {
  35071. pNext = pNext_;
  35072. return *this;
  35073. }
  35074. DeviceQueueInfo2 & setFlags( VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  35075. {
  35076. flags = flags_;
  35077. return *this;
  35078. }
  35079. DeviceQueueInfo2 & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
  35080. {
  35081. queueFamilyIndex = queueFamilyIndex_;
  35082. return *this;
  35083. }
  35084. DeviceQueueInfo2 & setQueueIndex( uint32_t queueIndex_ ) VULKAN_HPP_NOEXCEPT
  35085. {
  35086. queueIndex = queueIndex_;
  35087. return *this;
  35088. }
  35089. operator VkDeviceQueueInfo2 const&() const VULKAN_HPP_NOEXCEPT
  35090. {
  35091. return *reinterpret_cast<const VkDeviceQueueInfo2*>( this );
  35092. }
  35093. operator VkDeviceQueueInfo2 &() VULKAN_HPP_NOEXCEPT
  35094. {
  35095. return *reinterpret_cast<VkDeviceQueueInfo2*>( this );
  35096. }
  35097. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  35098. auto operator<=>( DeviceQueueInfo2 const& ) const = default;
  35099. #else
  35100. bool operator==( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  35101. {
  35102. return ( sType == rhs.sType )
  35103. && ( pNext == rhs.pNext )
  35104. && ( flags == rhs.flags )
  35105. && ( queueFamilyIndex == rhs.queueFamilyIndex )
  35106. && ( queueIndex == rhs.queueIndex );
  35107. }
  35108. bool operator!=( DeviceQueueInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  35109. {
  35110. return !operator==( rhs );
  35111. }
  35112. #endif
  35113. public:
  35114. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueInfo2;
  35115. const void* pNext = {};
  35116. VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags flags = {};
  35117. uint32_t queueFamilyIndex = {};
  35118. uint32_t queueIndex = {};
  35119. };
  35120. static_assert( sizeof( DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" );
  35121. static_assert( std::is_standard_layout<DeviceQueueInfo2>::value, "struct wrapper is not a standard layout!" );
  35122. template <>
  35123. struct CppType<StructureType, StructureType::eDeviceQueueInfo2>
  35124. {
  35125. using Type = DeviceQueueInfo2;
  35126. };
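// Usage sketch (illustrative comment only; assumes device creation requested a
// protected-capable queue in family `queueFamilyIndex`):
//   vk::DeviceQueueInfo2 queueInfo2( vk::DeviceQueueCreateFlagBits::eProtected, queueFamilyIndex, 0 );
//   vk::Queue protectedQueue = device.getQueue2( queueInfo2 );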
  35127. struct FenceGetFdInfoKHR
  35128. {
  35129. static const bool allowDuplicate = false;
  35130. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetFdInfoKHR;
  35131. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35132. VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
  35133. : fence( fence_ ), handleType( handleType_ )
  35134. {}
  35135. VULKAN_HPP_CONSTEXPR FenceGetFdInfoKHR( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35136. FenceGetFdInfoKHR( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  35137. : FenceGetFdInfoKHR( *reinterpret_cast<FenceGetFdInfoKHR const *>( &rhs ) )
  35138. {}
  35139. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35140. VULKAN_HPP_CONSTEXPR_14 FenceGetFdInfoKHR & operator=( FenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35141. FenceGetFdInfoKHR & operator=( VkFenceGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  35142. {
  35143. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR const *>( &rhs );
  35144. return *this;
  35145. }
  35146. FenceGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  35147. {
  35148. pNext = pNext_;
  35149. return *this;
  35150. }
  35151. FenceGetFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
  35152. {
  35153. fence = fence_;
  35154. return *this;
  35155. }
  35156. FenceGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
  35157. {
  35158. handleType = handleType_;
  35159. return *this;
  35160. }
  35161. operator VkFenceGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  35162. {
  35163. return *reinterpret_cast<const VkFenceGetFdInfoKHR*>( this );
  35164. }
  35165. operator VkFenceGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
  35166. {
  35167. return *reinterpret_cast<VkFenceGetFdInfoKHR*>( this );
  35168. }
  35169. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  35170. auto operator<=>( FenceGetFdInfoKHR const& ) const = default;
  35171. #else
  35172. bool operator==( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  35173. {
  35174. return ( sType == rhs.sType )
  35175. && ( pNext == rhs.pNext )
  35176. && ( fence == rhs.fence )
  35177. && ( handleType == rhs.handleType );
  35178. }
  35179. bool operator!=( FenceGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  35180. {
  35181. return !operator==( rhs );
  35182. }
  35183. #endif
  35184. public:
  35185. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetFdInfoKHR;
  35186. const void* pNext = {};
  35187. VULKAN_HPP_NAMESPACE::Fence fence = {};
  35188. VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
  35189. };
  35190. static_assert( sizeof( FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" );
  35191. static_assert( std::is_standard_layout<FenceGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
  35192. template <>
  35193. struct CppType<StructureType, StructureType::eFenceGetFdInfoKHR>
  35194. {
  35195. using Type = FenceGetFdInfoKHR;
  35196. };
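// Usage sketch (illustrative comment only; assumes `exportFence` was created with
// VK_KHR_external_fence_fd and an exportable opaque-fd handle type):
//   vk::FenceGetFdInfoKHR getFdInfo( exportFence, vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd );
//   int fenceFd = device.getFenceFdKHR( getFdInfo );   // caller owns the returned file descriptor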
  35197. #ifdef VK_USE_PLATFORM_WIN32_KHR
  35198. struct FenceGetWin32HandleInfoKHR
  35199. {
  35200. static const bool allowDuplicate = false;
  35201. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFenceGetWin32HandleInfoKHR;
  35202. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35203. VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
  35204. : fence( fence_ ), handleType( handleType_ )
  35205. {}
  35206. VULKAN_HPP_CONSTEXPR FenceGetWin32HandleInfoKHR( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35207. FenceGetWin32HandleInfoKHR( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  35208. : FenceGetWin32HandleInfoKHR( *reinterpret_cast<FenceGetWin32HandleInfoKHR const *>( &rhs ) )
  35209. {}
  35210. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35211. VULKAN_HPP_CONSTEXPR_14 FenceGetWin32HandleInfoKHR & operator=( FenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35212. FenceGetWin32HandleInfoKHR & operator=( VkFenceGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  35213. {
  35214. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR const *>( &rhs );
  35215. return *this;
  35216. }
  35217. FenceGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  35218. {
  35219. pNext = pNext_;
  35220. return *this;
  35221. }
  35222. FenceGetWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
  35223. {
  35224. fence = fence_;
  35225. return *this;
  35226. }
  35227. FenceGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
  35228. {
  35229. handleType = handleType_;
  35230. return *this;
  35231. }
  35232. operator VkFenceGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  35233. {
  35234. return *reinterpret_cast<const VkFenceGetWin32HandleInfoKHR*>( this );
  35235. }
  35236. operator VkFenceGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
  35237. {
  35238. return *reinterpret_cast<VkFenceGetWin32HandleInfoKHR*>( this );
  35239. }
  35240. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  35241. auto operator<=>( FenceGetWin32HandleInfoKHR const& ) const = default;
  35242. #else
  35243. bool operator==( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  35244. {
  35245. return ( sType == rhs.sType )
  35246. && ( pNext == rhs.pNext )
  35247. && ( fence == rhs.fence )
  35248. && ( handleType == rhs.handleType );
  35249. }
  35250. bool operator!=( FenceGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  35251. {
  35252. return !operator==( rhs );
  35253. }
  35254. #endif
  35255. public:
  35256. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFenceGetWin32HandleInfoKHR;
  35257. const void* pNext = {};
  35258. VULKAN_HPP_NAMESPACE::Fence fence = {};
  35259. VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
  35260. };
  35261. static_assert( sizeof( FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
  35262. static_assert( std::is_standard_layout<FenceGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
  35263. template <>
  35264. struct CppType<StructureType, StructureType::eFenceGetWin32HandleInfoKHR>
  35265. {
  35266. using Type = FenceGetWin32HandleInfoKHR;
  35267. };
  35268. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
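// Usage sketch (illustrative comment only; Win32 builds only, assumes `exportFence` was
// created with an exportable opaque Win32 handle type):
//   vk::FenceGetWin32HandleInfoKHR getHandleInfo( exportFence, vk::ExternalFenceHandleTypeFlagBits::eOpaqueWin32 );
//   HANDLE fenceHandle = device.getFenceWin32HandleKHR( getHandleInfo );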
  35269. struct GeneratedCommandsMemoryRequirementsInfoNV
  35270. {
  35271. static const bool allowDuplicate = false;
  35272. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;
  35273. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35274. VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV(VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics, VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ = {}, uint32_t maxSequencesCount_ = {}) VULKAN_HPP_NOEXCEPT
  35275. : pipelineBindPoint( pipelineBindPoint_ ), pipeline( pipeline_ ), indirectCommandsLayout( indirectCommandsLayout_ ), maxSequencesCount( maxSequencesCount_ )
  35276. {}
  35277. VULKAN_HPP_CONSTEXPR GeneratedCommandsMemoryRequirementsInfoNV( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35278. GeneratedCommandsMemoryRequirementsInfoNV( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  35279. : GeneratedCommandsMemoryRequirementsInfoNV( *reinterpret_cast<GeneratedCommandsMemoryRequirementsInfoNV const *>( &rhs ) )
  35280. {}
  35281. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35282. VULKAN_HPP_CONSTEXPR_14 GeneratedCommandsMemoryRequirementsInfoNV & operator=( GeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35283. GeneratedCommandsMemoryRequirementsInfoNV & operator=( VkGeneratedCommandsMemoryRequirementsInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  35284. {
  35285. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV const *>( &rhs );
  35286. return *this;
  35287. }
  35288. GeneratedCommandsMemoryRequirementsInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  35289. {
  35290. pNext = pNext_;
  35291. return *this;
  35292. }
  35293. GeneratedCommandsMemoryRequirementsInfoNV & setPipelineBindPoint( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint_ ) VULKAN_HPP_NOEXCEPT
  35294. {
  35295. pipelineBindPoint = pipelineBindPoint_;
  35296. return *this;
  35297. }
  35298. GeneratedCommandsMemoryRequirementsInfoNV & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
  35299. {
  35300. pipeline = pipeline_;
  35301. return *this;
  35302. }
  35303. GeneratedCommandsMemoryRequirementsInfoNV & setIndirectCommandsLayout( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout_ ) VULKAN_HPP_NOEXCEPT
  35304. {
  35305. indirectCommandsLayout = indirectCommandsLayout_;
  35306. return *this;
  35307. }
  35308. GeneratedCommandsMemoryRequirementsInfoNV & setMaxSequencesCount( uint32_t maxSequencesCount_ ) VULKAN_HPP_NOEXCEPT
  35309. {
  35310. maxSequencesCount = maxSequencesCount_;
  35311. return *this;
  35312. }
  35313. operator VkGeneratedCommandsMemoryRequirementsInfoNV const&() const VULKAN_HPP_NOEXCEPT
  35314. {
  35315. return *reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV*>( this );
  35316. }
  35317. operator VkGeneratedCommandsMemoryRequirementsInfoNV &() VULKAN_HPP_NOEXCEPT
  35318. {
  35319. return *reinterpret_cast<VkGeneratedCommandsMemoryRequirementsInfoNV*>( this );
  35320. }
  35321. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  35322. auto operator<=>( GeneratedCommandsMemoryRequirementsInfoNV const& ) const = default;
  35323. #else
  35324. bool operator==( GeneratedCommandsMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  35325. {
  35326. return ( sType == rhs.sType )
  35327. && ( pNext == rhs.pNext )
  35328. && ( pipelineBindPoint == rhs.pipelineBindPoint )
  35329. && ( pipeline == rhs.pipeline )
  35330. && ( indirectCommandsLayout == rhs.indirectCommandsLayout )
  35331. && ( maxSequencesCount == rhs.maxSequencesCount );
  35332. }
  35333. bool operator!=( GeneratedCommandsMemoryRequirementsInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  35334. {
  35335. return !operator==( rhs );
  35336. }
  35337. #endif
  35338. public:
  35339. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGeneratedCommandsMemoryRequirementsInfoNV;
  35340. const void* pNext = {};
  35341. VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint = VULKAN_HPP_NAMESPACE::PipelineBindPoint::eGraphics;
  35342. VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
  35343. VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout = {};
  35344. uint32_t maxSequencesCount = {};
  35345. };
  35346. static_assert( sizeof( GeneratedCommandsMemoryRequirementsInfoNV ) == sizeof( VkGeneratedCommandsMemoryRequirementsInfoNV ), "struct and wrapper have different size!" );
  35347. static_assert( std::is_standard_layout<GeneratedCommandsMemoryRequirementsInfoNV>::value, "struct wrapper is not a standard layout!" );
  35348. template <>
  35349. struct CppType<StructureType, StructureType::eGeneratedCommandsMemoryRequirementsInfoNV>
  35350. {
  35351. using Type = GeneratedCommandsMemoryRequirementsInfoNV;
  35352. };
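// Usage sketch (illustrative comment only; assumes VK_NV_device_generated_commands with a
// graphics `pipeline` and an `indirectCommandsLayout` built for it):
//   vk::GeneratedCommandsMemoryRequirementsInfoNV reqInfo( vk::PipelineBindPoint::eGraphics, pipeline, indirectCommandsLayout, maxSequences );
//   vk::MemoryRequirements2 preprocessReqs = device.getGeneratedCommandsMemoryRequirementsNV( reqInfo );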
  35353. struct ImageDrmFormatModifierPropertiesEXT
  35354. {
  35355. static const bool allowDuplicate = false;
  35356. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierPropertiesEXT;
  35357. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35358. VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT(uint64_t drmFormatModifier_ = {}) VULKAN_HPP_NOEXCEPT
  35359. : drmFormatModifier( drmFormatModifier_ )
  35360. {}
  35361. VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierPropertiesEXT( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35362. ImageDrmFormatModifierPropertiesEXT( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  35363. : ImageDrmFormatModifierPropertiesEXT( *reinterpret_cast<ImageDrmFormatModifierPropertiesEXT const *>( &rhs ) )
  35364. {}
  35365. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35366. VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierPropertiesEXT & operator=( ImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35367. ImageDrmFormatModifierPropertiesEXT & operator=( VkImageDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  35368. {
  35369. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT const *>( &rhs );
  35370. return *this;
  35371. }
  35372. operator VkImageDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
  35373. {
  35374. return *reinterpret_cast<const VkImageDrmFormatModifierPropertiesEXT*>( this );
  35375. }
  35376. operator VkImageDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
  35377. {
  35378. return *reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT*>( this );
  35379. }
  35380. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  35381. auto operator<=>( ImageDrmFormatModifierPropertiesEXT const& ) const = default;
  35382. #else
  35383. bool operator==( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  35384. {
  35385. return ( sType == rhs.sType )
  35386. && ( pNext == rhs.pNext )
  35387. && ( drmFormatModifier == rhs.drmFormatModifier );
  35388. }
  35389. bool operator!=( ImageDrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  35390. {
  35391. return !operator==( rhs );
  35392. }
  35393. #endif
  35394. public:
  35395. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierPropertiesEXT;
  35396. void* pNext = {};
  35397. uint64_t drmFormatModifier = {};
  35398. };
  35399. static_assert( sizeof( ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
  35400. static_assert( std::is_standard_layout<ImageDrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  35401. template <>
  35402. struct CppType<StructureType, StructureType::eImageDrmFormatModifierPropertiesEXT>
  35403. {
  35404. using Type = ImageDrmFormatModifierPropertiesEXT;
  35405. };
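// Usage sketch (illustrative comment only; assumes `image` was created with
// vk::ImageTiling::eDrmFormatModifierEXT via VK_EXT_image_drm_format_modifier):
//   vk::ImageDrmFormatModifierPropertiesEXT drmProps = device.getImageDrmFormatModifierPropertiesEXT( image );
//   uint64_t modifier = drmProps.drmFormatModifier;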
  35406. struct ImageMemoryRequirementsInfo2
  35407. {
  35408. static const bool allowDuplicate = false;
  35409. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageMemoryRequirementsInfo2;
  35410. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35411. VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Image image_ = {}) VULKAN_HPP_NOEXCEPT
  35412. : image( image_ )
  35413. {}
  35414. VULKAN_HPP_CONSTEXPR ImageMemoryRequirementsInfo2( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35415. ImageMemoryRequirementsInfo2( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
  35416. : ImageMemoryRequirementsInfo2( *reinterpret_cast<ImageMemoryRequirementsInfo2 const *>( &rhs ) )
  35417. {}
  35418. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35419. VULKAN_HPP_CONSTEXPR_14 ImageMemoryRequirementsInfo2 & operator=( ImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35420. ImageMemoryRequirementsInfo2 & operator=( VkImageMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
  35421. {
  35422. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 const *>( &rhs );
  35423. return *this;
  35424. }
  35425. ImageMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  35426. {
  35427. pNext = pNext_;
  35428. return *this;
  35429. }
  35430. ImageMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
  35431. {
  35432. image = image_;
  35433. return *this;
  35434. }
  35435. operator VkImageMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT
  35436. {
  35437. return *reinterpret_cast<const VkImageMemoryRequirementsInfo2*>( this );
  35438. }
  35439. operator VkImageMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
  35440. {
  35441. return *reinterpret_cast<VkImageMemoryRequirementsInfo2*>( this );
  35442. }
  35443. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  35444. auto operator<=>( ImageMemoryRequirementsInfo2 const& ) const = default;
  35445. #else
  35446. bool operator==( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  35447. {
  35448. return ( sType == rhs.sType )
  35449. && ( pNext == rhs.pNext )
  35450. && ( image == rhs.image );
  35451. }
  35452. bool operator!=( ImageMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  35453. {
  35454. return !operator==( rhs );
  35455. }
  35456. #endif
  35457. public:
  35458. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageMemoryRequirementsInfo2;
  35459. const void* pNext = {};
  35460. VULKAN_HPP_NAMESPACE::Image image = {};
  35461. };
  35462. static_assert( sizeof( ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
  35463. static_assert( std::is_standard_layout<ImageMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
  35464. template <>
  35465. struct CppType<StructureType, StructureType::eImageMemoryRequirementsInfo2>
  35466. {
  35467. using Type = ImageMemoryRequirementsInfo2;
  35468. };
  35469. using ImageMemoryRequirementsInfo2KHR = ImageMemoryRequirementsInfo2;
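// Usage sketch (illustrative comment only; the *2 query works on any vk::Image and can be
// extended through pNext, e.g. with MemoryDedicatedRequirements):
//   vk::MemoryRequirements2 memReqs2 = device.getImageMemoryRequirements2( vk::ImageMemoryRequirementsInfo2( image ) );
//   vk::DeviceSize requiredSize = memReqs2.memoryRequirements.size;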
  35470. struct SparseImageFormatProperties
  35471. {
  35472. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35473. VULKAN_HPP_CONSTEXPR SparseImageFormatProperties(VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}, VULKAN_HPP_NAMESPACE::Extent3D imageGranularity_ = {}, VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
  35474. : aspectMask( aspectMask_ ), imageGranularity( imageGranularity_ ), flags( flags_ )
  35475. {}
  35476. VULKAN_HPP_CONSTEXPR SparseImageFormatProperties( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35477. SparseImageFormatProperties( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  35478. : SparseImageFormatProperties( *reinterpret_cast<SparseImageFormatProperties const *>( &rhs ) )
  35479. {}
  35480. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35481. VULKAN_HPP_CONSTEXPR_14 SparseImageFormatProperties & operator=( SparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35482. SparseImageFormatProperties & operator=( VkSparseImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  35483. {
  35484. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties const *>( &rhs );
  35485. return *this;
  35486. }
  35487. operator VkSparseImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
  35488. {
  35489. return *reinterpret_cast<const VkSparseImageFormatProperties*>( this );
  35490. }
  35491. operator VkSparseImageFormatProperties &() VULKAN_HPP_NOEXCEPT
  35492. {
  35493. return *reinterpret_cast<VkSparseImageFormatProperties*>( this );
  35494. }
  35495. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  35496. auto operator<=>( SparseImageFormatProperties const& ) const = default;
  35497. #else
  35498. bool operator==( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  35499. {
  35500. return ( aspectMask == rhs.aspectMask )
  35501. && ( imageGranularity == rhs.imageGranularity )
  35502. && ( flags == rhs.flags );
  35503. }
  35504. bool operator!=( SparseImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  35505. {
  35506. return !operator==( rhs );
  35507. }
  35508. #endif
  35509. public:
  35510. VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
  35511. VULKAN_HPP_NAMESPACE::Extent3D imageGranularity = {};
  35512. VULKAN_HPP_NAMESPACE::SparseImageFormatFlags flags = {};
  35513. };
  35514. static_assert( sizeof( SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ), "struct and wrapper have different size!" );
  35515. static_assert( std::is_standard_layout<SparseImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
  35516. struct SparseImageMemoryRequirements
  35517. {
  35518. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35519. VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements(VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties_ = {}, uint32_t imageMipTailFirstLod_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride_ = {}) VULKAN_HPP_NOEXCEPT
  35520. : formatProperties( formatProperties_ ), imageMipTailFirstLod( imageMipTailFirstLod_ ), imageMipTailSize( imageMipTailSize_ ), imageMipTailOffset( imageMipTailOffset_ ), imageMipTailStride( imageMipTailStride_ )
  35521. {}
  35522. VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35523. SparseImageMemoryRequirements( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
  35524. : SparseImageMemoryRequirements( *reinterpret_cast<SparseImageMemoryRequirements const *>( &rhs ) )
  35525. {}
  35526. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35527. VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryRequirements & operator=( SparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35528. SparseImageMemoryRequirements & operator=( VkSparseImageMemoryRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
  35529. {
  35530. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const *>( &rhs );
  35531. return *this;
  35532. }
  35533. operator VkSparseImageMemoryRequirements const&() const VULKAN_HPP_NOEXCEPT
  35534. {
  35535. return *reinterpret_cast<const VkSparseImageMemoryRequirements*>( this );
  35536. }
  35537. operator VkSparseImageMemoryRequirements &() VULKAN_HPP_NOEXCEPT
  35538. {
  35539. return *reinterpret_cast<VkSparseImageMemoryRequirements*>( this );
  35540. }
  35541. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  35542. auto operator<=>( SparseImageMemoryRequirements const& ) const = default;
  35543. #else
  35544. bool operator==( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
  35545. {
  35546. return ( formatProperties == rhs.formatProperties )
  35547. && ( imageMipTailFirstLod == rhs.imageMipTailFirstLod )
  35548. && ( imageMipTailSize == rhs.imageMipTailSize )
  35549. && ( imageMipTailOffset == rhs.imageMipTailOffset )
  35550. && ( imageMipTailStride == rhs.imageMipTailStride );
  35551. }
  35552. bool operator!=( SparseImageMemoryRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
  35553. {
  35554. return !operator==( rhs );
  35555. }
  35556. #endif
  35557. public:
  35558. VULKAN_HPP_NAMESPACE::SparseImageFormatProperties formatProperties = {};
  35559. uint32_t imageMipTailFirstLod = {};
  35560. VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailSize = {};
  35561. VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailOffset = {};
  35562. VULKAN_HPP_NAMESPACE::DeviceSize imageMipTailStride = {};
  35563. };
  35564. static_assert( sizeof( SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ), "struct and wrapper have different size!" );
  35565. static_assert( std::is_standard_layout<SparseImageMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
  35566. struct ImageSparseMemoryRequirementsInfo2
  35567. {
  35568. static const bool allowDuplicate = false;
  35569. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSparseMemoryRequirementsInfo2;
  35570. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35571. VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2(VULKAN_HPP_NAMESPACE::Image image_ = {}) VULKAN_HPP_NOEXCEPT
  35572. : image( image_ )
  35573. {}
  35574. VULKAN_HPP_CONSTEXPR ImageSparseMemoryRequirementsInfo2( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35575. ImageSparseMemoryRequirementsInfo2( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
  35576. : ImageSparseMemoryRequirementsInfo2( *reinterpret_cast<ImageSparseMemoryRequirementsInfo2 const *>( &rhs ) )
  35577. {}
  35578. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35579. VULKAN_HPP_CONSTEXPR_14 ImageSparseMemoryRequirementsInfo2 & operator=( ImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35580. ImageSparseMemoryRequirementsInfo2 & operator=( VkImageSparseMemoryRequirementsInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
  35581. {
  35582. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 const *>( &rhs );
  35583. return *this;
  35584. }
  35585. ImageSparseMemoryRequirementsInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  35586. {
  35587. pNext = pNext_;
  35588. return *this;
  35589. }
  35590. ImageSparseMemoryRequirementsInfo2 & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
  35591. {
  35592. image = image_;
  35593. return *this;
  35594. }
  35595. operator VkImageSparseMemoryRequirementsInfo2 const&() const VULKAN_HPP_NOEXCEPT
  35596. {
  35597. return *reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2*>( this );
  35598. }
  35599. operator VkImageSparseMemoryRequirementsInfo2 &() VULKAN_HPP_NOEXCEPT
  35600. {
  35601. return *reinterpret_cast<VkImageSparseMemoryRequirementsInfo2*>( this );
  35602. }
  35603. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  35604. auto operator<=>( ImageSparseMemoryRequirementsInfo2 const& ) const = default;
  35605. #else
  35606. bool operator==( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  35607. {
  35608. return ( sType == rhs.sType )
  35609. && ( pNext == rhs.pNext )
  35610. && ( image == rhs.image );
  35611. }
  35612. bool operator!=( ImageSparseMemoryRequirementsInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  35613. {
  35614. return !operator==( rhs );
  35615. }
  35616. #endif
  35617. public:
  35618. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSparseMemoryRequirementsInfo2;
  35619. const void* pNext = {};
  35620. VULKAN_HPP_NAMESPACE::Image image = {};
  35621. };
  35622. static_assert( sizeof( ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ), "struct and wrapper have different size!" );
  35623. static_assert( std::is_standard_layout<ImageSparseMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
  35624. template <>
  35625. struct CppType<StructureType, StructureType::eImageSparseMemoryRequirementsInfo2>
  35626. {
  35627. using Type = ImageSparseMemoryRequirementsInfo2;
  35628. };
  35629. using ImageSparseMemoryRequirementsInfo2KHR = ImageSparseMemoryRequirementsInfo2;
  35630. struct SparseImageMemoryRequirements2
  35631. {
  35632. static const bool allowDuplicate = false;
  35633. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageMemoryRequirements2;
  35634. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35635. VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2(VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements_ = {}) VULKAN_HPP_NOEXCEPT
  35636. : memoryRequirements( memoryRequirements_ )
  35637. {}
  35638. VULKAN_HPP_CONSTEXPR SparseImageMemoryRequirements2( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35639. SparseImageMemoryRequirements2( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
  35640. : SparseImageMemoryRequirements2( *reinterpret_cast<SparseImageMemoryRequirements2 const *>( &rhs ) )
  35641. {}
  35642. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35643. VULKAN_HPP_CONSTEXPR_14 SparseImageMemoryRequirements2 & operator=( SparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35644. SparseImageMemoryRequirements2 & operator=( VkSparseImageMemoryRequirements2 const & rhs ) VULKAN_HPP_NOEXCEPT
  35645. {
  35646. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 const *>( &rhs );
  35647. return *this;
  35648. }
  35649. operator VkSparseImageMemoryRequirements2 const&() const VULKAN_HPP_NOEXCEPT
  35650. {
  35651. return *reinterpret_cast<const VkSparseImageMemoryRequirements2*>( this );
  35652. }
  35653. operator VkSparseImageMemoryRequirements2 &() VULKAN_HPP_NOEXCEPT
  35654. {
  35655. return *reinterpret_cast<VkSparseImageMemoryRequirements2*>( this );
  35656. }
  35657. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  35658. auto operator<=>( SparseImageMemoryRequirements2 const& ) const = default;
  35659. #else
  35660. bool operator==( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  35661. {
  35662. return ( sType == rhs.sType )
  35663. && ( pNext == rhs.pNext )
  35664. && ( memoryRequirements == rhs.memoryRequirements );
  35665. }
  35666. bool operator!=( SparseImageMemoryRequirements2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  35667. {
  35668. return !operator==( rhs );
  35669. }
  35670. #endif
  35671. public:
  35672. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageMemoryRequirements2;
  35673. void* pNext = {};
  35674. VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements memoryRequirements = {};
  35675. };
  35676. static_assert( sizeof( SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ), "struct and wrapper have different size!" );
  35677. static_assert( std::is_standard_layout<SparseImageMemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
  35678. template <>
  35679. struct CppType<StructureType, StructureType::eSparseImageMemoryRequirements2>
  35680. {
  35681. using Type = SparseImageMemoryRequirements2;
  35682. };
  35683. using SparseImageMemoryRequirements2KHR = SparseImageMemoryRequirements2;
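// Usage sketch (illustrative comment only; assumes `sparseImage` was created with
// vk::ImageCreateFlagBits::eSparseResidency):
//   std::vector<vk::SparseImageMemoryRequirements2> sparseReqs =
//     device.getImageSparseMemoryRequirements2( vk::ImageSparseMemoryRequirementsInfo2( sparseImage ) );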
  35684. struct SubresourceLayout
  35685. {
  35686. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35687. VULKAN_HPP_CONSTEXPR SubresourceLayout(VULKAN_HPP_NAMESPACE::DeviceSize offset_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize rowPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize depthPitch_ = {}) VULKAN_HPP_NOEXCEPT
  35688. : offset( offset_ ), size( size_ ), rowPitch( rowPitch_ ), arrayPitch( arrayPitch_ ), depthPitch( depthPitch_ )
  35689. {}
  35690. VULKAN_HPP_CONSTEXPR SubresourceLayout( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35691. SubresourceLayout( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
  35692. : SubresourceLayout( *reinterpret_cast<SubresourceLayout const *>( &rhs ) )
  35693. {}
  35694. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35695. VULKAN_HPP_CONSTEXPR_14 SubresourceLayout & operator=( SubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35696. SubresourceLayout & operator=( VkSubresourceLayout const & rhs ) VULKAN_HPP_NOEXCEPT
  35697. {
  35698. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubresourceLayout const *>( &rhs );
  35699. return *this;
  35700. }
  35701. operator VkSubresourceLayout const&() const VULKAN_HPP_NOEXCEPT
  35702. {
  35703. return *reinterpret_cast<const VkSubresourceLayout*>( this );
  35704. }
  35705. operator VkSubresourceLayout &() VULKAN_HPP_NOEXCEPT
  35706. {
  35707. return *reinterpret_cast<VkSubresourceLayout*>( this );
  35708. }
  35709. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  35710. auto operator<=>( SubresourceLayout const& ) const = default;
  35711. #else
  35712. bool operator==( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
  35713. {
  35714. return ( offset == rhs.offset )
  35715. && ( size == rhs.size )
  35716. && ( rowPitch == rhs.rowPitch )
  35717. && ( arrayPitch == rhs.arrayPitch )
  35718. && ( depthPitch == rhs.depthPitch );
  35719. }
  35720. bool operator!=( SubresourceLayout const& rhs ) const VULKAN_HPP_NOEXCEPT
  35721. {
  35722. return !operator==( rhs );
  35723. }
  35724. #endif
  35725. public:
  35726. VULKAN_HPP_NAMESPACE::DeviceSize offset = {};
  35727. VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  35728. VULKAN_HPP_NAMESPACE::DeviceSize rowPitch = {};
  35729. VULKAN_HPP_NAMESPACE::DeviceSize arrayPitch = {};
  35730. VULKAN_HPP_NAMESPACE::DeviceSize depthPitch = {};
  35731. };
  35732. static_assert( sizeof( SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" );
  35733. static_assert( std::is_standard_layout<SubresourceLayout>::value, "struct wrapper is not a standard layout!" );
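// Usage sketch (illustrative comment only; assumes `linearImage` uses vk::ImageTiling::eLinear
// so its memory can be addressed directly once mapped):
//   vk::SubresourceLayout layout = device.getImageSubresourceLayout( linearImage, vk::ImageSubresource( vk::ImageAspectFlagBits::eColor, 0, 0 ) );
//   // texel (x, y) of mip 0 lives at layout.offset + y * layout.rowPitch + x * texelSize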
  35734. struct ImageViewAddressPropertiesNVX
  35735. {
  35736. static const bool allowDuplicate = false;
  35737. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAddressPropertiesNVX;
  35738. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35739. VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX(VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}) VULKAN_HPP_NOEXCEPT
  35740. : deviceAddress( deviceAddress_ ), size( size_ )
  35741. {}
  35742. VULKAN_HPP_CONSTEXPR ImageViewAddressPropertiesNVX( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35743. ImageViewAddressPropertiesNVX( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
  35744. : ImageViewAddressPropertiesNVX( *reinterpret_cast<ImageViewAddressPropertiesNVX const *>( &rhs ) )
  35745. {}
  35746. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35747. VULKAN_HPP_CONSTEXPR_14 ImageViewAddressPropertiesNVX & operator=( ImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35748. ImageViewAddressPropertiesNVX & operator=( VkImageViewAddressPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
  35749. {
  35750. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX const *>( &rhs );
  35751. return *this;
  35752. }
  35753. operator VkImageViewAddressPropertiesNVX const&() const VULKAN_HPP_NOEXCEPT
  35754. {
  35755. return *reinterpret_cast<const VkImageViewAddressPropertiesNVX*>( this );
  35756. }
  35757. operator VkImageViewAddressPropertiesNVX &() VULKAN_HPP_NOEXCEPT
  35758. {
  35759. return *reinterpret_cast<VkImageViewAddressPropertiesNVX*>( this );
  35760. }
  35761. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  35762. auto operator<=>( ImageViewAddressPropertiesNVX const& ) const = default;
  35763. #else
  35764. bool operator==( ImageViewAddressPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
  35765. {
  35766. return ( sType == rhs.sType )
  35767. && ( pNext == rhs.pNext )
  35768. && ( deviceAddress == rhs.deviceAddress )
  35769. && ( size == rhs.size );
  35770. }
  35771. bool operator!=( ImageViewAddressPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
  35772. {
  35773. return !operator==( rhs );
  35774. }
  35775. #endif
  35776. public:
  35777. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAddressPropertiesNVX;
  35778. void* pNext = {};
  35779. VULKAN_HPP_NAMESPACE::DeviceAddress deviceAddress = {};
  35780. VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  35781. };
  35782. static_assert( sizeof( ImageViewAddressPropertiesNVX ) == sizeof( VkImageViewAddressPropertiesNVX ), "struct and wrapper have different size!" );
  35783. static_assert( std::is_standard_layout<ImageViewAddressPropertiesNVX>::value, "struct wrapper is not a standard layout!" );
  35784. template <>
  35785. struct CppType<StructureType, StructureType::eImageViewAddressPropertiesNVX>
  35786. {
  35787. using Type = ImageViewAddressPropertiesNVX;
  35788. };
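// Usage sketch (illustrative comment only; assumes the VK_NVX_image_view_handle extension
// is enabled on the device):
//   vk::ImageViewAddressPropertiesNVX addressProps = device.getImageViewAddressNVX( imageView );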
  35789. struct ImageViewHandleInfoNVX
  35790. {
  35791. static const bool allowDuplicate = false;
  35792. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewHandleInfoNVX;
  35793. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35794. VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX(VULKAN_HPP_NAMESPACE::ImageView imageView_ = {}, VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler, VULKAN_HPP_NAMESPACE::Sampler sampler_ = {}) VULKAN_HPP_NOEXCEPT
  35795. : imageView( imageView_ ), descriptorType( descriptorType_ ), sampler( sampler_ )
  35796. {}
  35797. VULKAN_HPP_CONSTEXPR ImageViewHandleInfoNVX( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35798. ImageViewHandleInfoNVX( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
  35799. : ImageViewHandleInfoNVX( *reinterpret_cast<ImageViewHandleInfoNVX const *>( &rhs ) )
  35800. {}
  35801. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35802. VULKAN_HPP_CONSTEXPR_14 ImageViewHandleInfoNVX & operator=( ImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35803. ImageViewHandleInfoNVX & operator=( VkImageViewHandleInfoNVX const & rhs ) VULKAN_HPP_NOEXCEPT
  35804. {
  35805. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX const *>( &rhs );
  35806. return *this;
  35807. }
  35808. ImageViewHandleInfoNVX & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  35809. {
  35810. pNext = pNext_;
  35811. return *this;
  35812. }
  35813. ImageViewHandleInfoNVX & setImageView( VULKAN_HPP_NAMESPACE::ImageView imageView_ ) VULKAN_HPP_NOEXCEPT
  35814. {
  35815. imageView = imageView_;
  35816. return *this;
  35817. }
  35818. ImageViewHandleInfoNVX & setDescriptorType( VULKAN_HPP_NAMESPACE::DescriptorType descriptorType_ ) VULKAN_HPP_NOEXCEPT
  35819. {
  35820. descriptorType = descriptorType_;
  35821. return *this;
  35822. }
  35823. ImageViewHandleInfoNVX & setSampler( VULKAN_HPP_NAMESPACE::Sampler sampler_ ) VULKAN_HPP_NOEXCEPT
  35824. {
  35825. sampler = sampler_;
  35826. return *this;
  35827. }
  35828. operator VkImageViewHandleInfoNVX const&() const VULKAN_HPP_NOEXCEPT
  35829. {
  35830. return *reinterpret_cast<const VkImageViewHandleInfoNVX*>( this );
  35831. }
  35832. operator VkImageViewHandleInfoNVX &() VULKAN_HPP_NOEXCEPT
  35833. {
  35834. return *reinterpret_cast<VkImageViewHandleInfoNVX*>( this );
  35835. }
  35836. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  35837. auto operator<=>( ImageViewHandleInfoNVX const& ) const = default;
  35838. #else
  35839. bool operator==( ImageViewHandleInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
  35840. {
  35841. return ( sType == rhs.sType )
  35842. && ( pNext == rhs.pNext )
  35843. && ( imageView == rhs.imageView )
  35844. && ( descriptorType == rhs.descriptorType )
  35845. && ( sampler == rhs.sampler );
  35846. }
  35847. bool operator!=( ImageViewHandleInfoNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
  35848. {
  35849. return !operator==( rhs );
  35850. }
  35851. #endif
  35852. public:
  35853. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewHandleInfoNVX;
  35854. const void* pNext = {};
  35855. VULKAN_HPP_NAMESPACE::ImageView imageView = {};
  35856. VULKAN_HPP_NAMESPACE::DescriptorType descriptorType = VULKAN_HPP_NAMESPACE::DescriptorType::eSampler;
  35857. VULKAN_HPP_NAMESPACE::Sampler sampler = {};
  35858. };
  35859. static_assert( sizeof( ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ), "struct and wrapper have different size!" );
  35860. static_assert( std::is_standard_layout<ImageViewHandleInfoNVX>::value, "struct wrapper is not a standard layout!" );
  35861. template <>
  35862. struct CppType<StructureType, StructureType::eImageViewHandleInfoNVX>
  35863. {
  35864. using Type = ImageViewHandleInfoNVX;
  35865. };
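// Usage sketch (illustrative comment only; assumes VK_NVX_image_view_handle and a
// sampled-image view, so no sampler is required):
//   vk::ImageViewHandleInfoNVX handleInfo( imageView, vk::DescriptorType::eSampledImage );
//   uint32_t viewHandle = device.getImageViewHandleNVX( handleInfo );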
  35866. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  35867. struct MemoryGetAndroidHardwareBufferInfoANDROID
  35868. {
  35869. static const bool allowDuplicate = false;
  35870. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
  35871. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35872. VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}) VULKAN_HPP_NOEXCEPT
  35873. : memory( memory_ )
  35874. {}
  35875. VULKAN_HPP_CONSTEXPR MemoryGetAndroidHardwareBufferInfoANDROID( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35876. MemoryGetAndroidHardwareBufferInfoANDROID( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
  35877. : MemoryGetAndroidHardwareBufferInfoANDROID( *reinterpret_cast<MemoryGetAndroidHardwareBufferInfoANDROID const *>( &rhs ) )
  35878. {}
  35879. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35880. VULKAN_HPP_CONSTEXPR_14 MemoryGetAndroidHardwareBufferInfoANDROID & operator=( MemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35881. MemoryGetAndroidHardwareBufferInfoANDROID & operator=( VkMemoryGetAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
  35882. {
  35883. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID const *>( &rhs );
  35884. return *this;
  35885. }
  35886. MemoryGetAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  35887. {
  35888. pNext = pNext_;
  35889. return *this;
  35890. }
  35891. MemoryGetAndroidHardwareBufferInfoANDROID & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
  35892. {
  35893. memory = memory_;
  35894. return *this;
  35895. }
  35896. operator VkMemoryGetAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT
  35897. {
  35898. return *reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID*>( this );
  35899. }
  35900. operator VkMemoryGetAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
  35901. {
  35902. return *reinterpret_cast<VkMemoryGetAndroidHardwareBufferInfoANDROID*>( this );
  35903. }
  35904. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  35905. auto operator<=>( MemoryGetAndroidHardwareBufferInfoANDROID const& ) const = default;
  35906. #else
  35907. bool operator==( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
  35908. {
  35909. return ( sType == rhs.sType )
  35910. && ( pNext == rhs.pNext )
  35911. && ( memory == rhs.memory );
  35912. }
  35913. bool operator!=( MemoryGetAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
  35914. {
  35915. return !operator==( rhs );
  35916. }
  35917. #endif
  35918. public:
  35919. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID;
  35920. const void* pNext = {};
  35921. VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
  35922. };
  35923. static_assert( sizeof( MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
  35924. static_assert( std::is_standard_layout<MemoryGetAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
  35925. template <>
  35926. struct CppType<StructureType, StructureType::eMemoryGetAndroidHardwareBufferInfoANDROID>
  35927. {
  35928. using Type = MemoryGetAndroidHardwareBufferInfoANDROID;
  35929. };
  35930. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
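// Usage sketch (illustrative comment only; Android builds only, assumes `exportMemory` was
// allocated with an exportable AHardwareBuffer handle type):
//   vk::MemoryGetAndroidHardwareBufferInfoANDROID getBufferInfo( exportMemory );
//   struct AHardwareBuffer * hardwareBuffer = device.getMemoryAndroidHardwareBufferANDROID( getBufferInfo );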
  35931. struct MemoryGetFdInfoKHR
  35932. {
  35933. static const bool allowDuplicate = false;
  35934. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetFdInfoKHR;
  35935. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35936. VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
  35937. : memory( memory_ ), handleType( handleType_ )
  35938. {}
  35939. VULKAN_HPP_CONSTEXPR MemoryGetFdInfoKHR( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35940. MemoryGetFdInfoKHR( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  35941. : MemoryGetFdInfoKHR( *reinterpret_cast<MemoryGetFdInfoKHR const *>( &rhs ) )
  35942. {}
  35943. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  35944. VULKAN_HPP_CONSTEXPR_14 MemoryGetFdInfoKHR & operator=( MemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  35945. MemoryGetFdInfoKHR & operator=( VkMemoryGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  35946. {
  35947. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR const *>( &rhs );
  35948. return *this;
  35949. }
  35950. MemoryGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  35951. {
  35952. pNext = pNext_;
  35953. return *this;
  35954. }
  35955. MemoryGetFdInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
  35956. {
  35957. memory = memory_;
  35958. return *this;
  35959. }
  35960. MemoryGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
  35961. {
  35962. handleType = handleType_;
  35963. return *this;
  35964. }
  35965. operator VkMemoryGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  35966. {
  35967. return *reinterpret_cast<const VkMemoryGetFdInfoKHR*>( this );
  35968. }
  35969. operator VkMemoryGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
  35970. {
  35971. return *reinterpret_cast<VkMemoryGetFdInfoKHR*>( this );
  35972. }
  35973. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  35974. auto operator<=>( MemoryGetFdInfoKHR const& ) const = default;
  35975. #else
  35976. bool operator==( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  35977. {
  35978. return ( sType == rhs.sType )
  35979. && ( pNext == rhs.pNext )
  35980. && ( memory == rhs.memory )
  35981. && ( handleType == rhs.handleType );
  35982. }
  35983. bool operator!=( MemoryGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  35984. {
  35985. return !operator==( rhs );
  35986. }
  35987. #endif
  35988. public:
  35989. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetFdInfoKHR;
  35990. const void* pNext = {};
  35991. VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
  35992. VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  35993. };
  35994. static_assert( sizeof( MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" );
  35995. static_assert( std::is_standard_layout<MemoryGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
  35996. template <>
  35997. struct CppType<StructureType, StructureType::eMemoryGetFdInfoKHR>
  35998. {
  35999. using Type = MemoryGetFdInfoKHR;
  36000. };
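// Usage sketch (illustrative): exporting a POSIX file descriptor for exportable device memory.
// `device` and `exportableMemory` are assumed to be a valid vk::Device and a vk::DeviceMemory
// allocated with an eOpaqueFd-exportable handle type.
//
//   vk::MemoryGetFdInfoKHR getFdInfo( exportableMemory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
//   int fd = device.getMemoryFdKHR( getFdInfo );   // enhanced-mode wrapper around vkGetMemoryFdKHR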
  36001. struct MemoryFdPropertiesKHR
  36002. {
  36003. static const bool allowDuplicate = false;
  36004. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryFdPropertiesKHR;
  36005. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36006. VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR(uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
  36007. : memoryTypeBits( memoryTypeBits_ )
  36008. {}
  36009. VULKAN_HPP_CONSTEXPR MemoryFdPropertiesKHR( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36010. MemoryFdPropertiesKHR( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36011. : MemoryFdPropertiesKHR( *reinterpret_cast<MemoryFdPropertiesKHR const *>( &rhs ) )
  36012. {}
  36013. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36014. VULKAN_HPP_CONSTEXPR_14 MemoryFdPropertiesKHR & operator=( MemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36015. MemoryFdPropertiesKHR & operator=( VkMemoryFdPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36016. {
  36017. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR const *>( &rhs );
  36018. return *this;
  36019. }
  36020. operator VkMemoryFdPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
  36021. {
  36022. return *reinterpret_cast<const VkMemoryFdPropertiesKHR*>( this );
  36023. }
  36024. operator VkMemoryFdPropertiesKHR &() VULKAN_HPP_NOEXCEPT
  36025. {
  36026. return *reinterpret_cast<VkMemoryFdPropertiesKHR*>( this );
  36027. }
  36028. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  36029. auto operator<=>( MemoryFdPropertiesKHR const& ) const = default;
  36030. #else
  36031. bool operator==( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  36032. {
  36033. return ( sType == rhs.sType )
  36034. && ( pNext == rhs.pNext )
  36035. && ( memoryTypeBits == rhs.memoryTypeBits );
  36036. }
  36037. bool operator!=( MemoryFdPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  36038. {
  36039. return !operator==( rhs );
  36040. }
  36041. #endif
  36042. public:
  36043. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryFdPropertiesKHR;
  36044. void* pNext = {};
  36045. uint32_t memoryTypeBits = {};
  36046. };
  36047. static_assert( sizeof( MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ), "struct and wrapper have different size!" );
  36048. static_assert( std::is_standard_layout<MemoryFdPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
  36049. template <>
  36050. struct CppType<StructureType, StructureType::eMemoryFdPropertiesKHR>
  36051. {
  36052. using Type = MemoryFdPropertiesKHR;
  36053. };
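// Usage sketch (illustrative): querying which memory types can import an externally supplied fd.
// `device` and `dmaBufFd` are assumed names; opaque fds may not be queried this way, so a dma-buf
// handle type is used here.
//
//   vk::MemoryFdPropertiesKHR fdProps =
//     device.getMemoryFdPropertiesKHR( vk::ExternalMemoryHandleTypeFlagBits::eDmaBufEXT, dmaBufFd );
//   uint32_t importableTypes = fdProps.memoryTypeBits;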
  36054. struct MemoryHostPointerPropertiesEXT
  36055. {
  36056. static const bool allowDuplicate = false;
  36057. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryHostPointerPropertiesEXT;
  36058. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36059. VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT(uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
  36060. : memoryTypeBits( memoryTypeBits_ )
  36061. {}
  36062. VULKAN_HPP_CONSTEXPR MemoryHostPointerPropertiesEXT( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36063. MemoryHostPointerPropertiesEXT( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  36064. : MemoryHostPointerPropertiesEXT( *reinterpret_cast<MemoryHostPointerPropertiesEXT const *>( &rhs ) )
  36065. {}
  36066. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36067. VULKAN_HPP_CONSTEXPR_14 MemoryHostPointerPropertiesEXT & operator=( MemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36068. MemoryHostPointerPropertiesEXT & operator=( VkMemoryHostPointerPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  36069. {
  36070. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT const *>( &rhs );
  36071. return *this;
  36072. }
  36073. operator VkMemoryHostPointerPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
  36074. {
  36075. return *reinterpret_cast<const VkMemoryHostPointerPropertiesEXT*>( this );
  36076. }
  36077. operator VkMemoryHostPointerPropertiesEXT &() VULKAN_HPP_NOEXCEPT
  36078. {
  36079. return *reinterpret_cast<VkMemoryHostPointerPropertiesEXT*>( this );
  36080. }
  36081. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  36082. auto operator<=>( MemoryHostPointerPropertiesEXT const& ) const = default;
  36083. #else
  36084. bool operator==( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  36085. {
  36086. return ( sType == rhs.sType )
  36087. && ( pNext == rhs.pNext )
  36088. && ( memoryTypeBits == rhs.memoryTypeBits );
  36089. }
  36090. bool operator!=( MemoryHostPointerPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  36091. {
  36092. return !operator==( rhs );
  36093. }
  36094. #endif
  36095. public:
  36096. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryHostPointerPropertiesEXT;
  36097. void* pNext = {};
  36098. uint32_t memoryTypeBits = {};
  36099. };
  36100. static_assert( sizeof( MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ), "struct and wrapper have different size!" );
  36101. static_assert( std::is_standard_layout<MemoryHostPointerPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  36102. template <>
  36103. struct CppType<StructureType, StructureType::eMemoryHostPointerPropertiesEXT>
  36104. {
  36105. using Type = MemoryHostPointerPropertiesEXT;
  36106. };
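// Usage sketch (illustrative): checking which memory types can back a host allocation before
// importing it via VK_EXT_external_memory_host. `device` and `hostPtr` are assumed names.
//
//   vk::MemoryHostPointerPropertiesEXT hostProps = device.getMemoryHostPointerPropertiesEXT(
//     vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, hostPtr );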
  36107. #ifdef VK_USE_PLATFORM_WIN32_KHR
  36108. struct MemoryGetWin32HandleInfoKHR
  36109. {
  36110. static const bool allowDuplicate = false;
  36111. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryGetWin32HandleInfoKHR;
  36112. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36113. VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::DeviceMemory memory_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
  36114. : memory( memory_ ), handleType( handleType_ )
  36115. {}
  36116. VULKAN_HPP_CONSTEXPR MemoryGetWin32HandleInfoKHR( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36117. MemoryGetWin32HandleInfoKHR( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36118. : MemoryGetWin32HandleInfoKHR( *reinterpret_cast<MemoryGetWin32HandleInfoKHR const *>( &rhs ) )
  36119. {}
  36120. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36121. VULKAN_HPP_CONSTEXPR_14 MemoryGetWin32HandleInfoKHR & operator=( MemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36122. MemoryGetWin32HandleInfoKHR & operator=( VkMemoryGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36123. {
  36124. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR const *>( &rhs );
  36125. return *this;
  36126. }
  36127. MemoryGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  36128. {
  36129. pNext = pNext_;
  36130. return *this;
  36131. }
  36132. MemoryGetWin32HandleInfoKHR & setMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory_ ) VULKAN_HPP_NOEXCEPT
  36133. {
  36134. memory = memory_;
  36135. return *this;
  36136. }
  36137. MemoryGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
  36138. {
  36139. handleType = handleType_;
  36140. return *this;
  36141. }
  36142. operator VkMemoryGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  36143. {
  36144. return *reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR*>( this );
  36145. }
  36146. operator VkMemoryGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
  36147. {
  36148. return *reinterpret_cast<VkMemoryGetWin32HandleInfoKHR*>( this );
  36149. }
  36150. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  36151. auto operator<=>( MemoryGetWin32HandleInfoKHR const& ) const = default;
  36152. #else
  36153. bool operator==( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  36154. {
  36155. return ( sType == rhs.sType )
  36156. && ( pNext == rhs.pNext )
  36157. && ( memory == rhs.memory )
  36158. && ( handleType == rhs.handleType );
  36159. }
  36160. bool operator!=( MemoryGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  36161. {
  36162. return !operator==( rhs );
  36163. }
  36164. #endif
  36165. public:
  36166. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryGetWin32HandleInfoKHR;
  36167. const void* pNext = {};
  36168. VULKAN_HPP_NAMESPACE::DeviceMemory memory = {};
  36169. VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  36170. };
  36171. static_assert( sizeof( MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
  36172. static_assert( std::is_standard_layout<MemoryGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
  36173. template <>
  36174. struct CppType<StructureType, StructureType::eMemoryGetWin32HandleInfoKHR>
  36175. {
  36176. using Type = MemoryGetWin32HandleInfoKHR;
  36177. };
  36178. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
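// Usage sketch (illustrative): the Win32 analogue of the fd export above. `exportableMemory` is an
// assumed vk::DeviceMemory; pass the Win32 handle type it was allocated with rather than the
// struct's eOpaqueFd default.
//
//   vk::MemoryGetWin32HandleInfoKHR getHandleInfo( exportableMemory,
//                                                  vk::ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 );
//   HANDLE handle = device.getMemoryWin32HandleKHR( getHandleInfo );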
  36179. #ifdef VK_USE_PLATFORM_WIN32_KHR
  36180. struct MemoryWin32HandlePropertiesKHR
  36181. {
  36182. static const bool allowDuplicate = false;
  36183. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryWin32HandlePropertiesKHR;
  36184. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36185. VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR(uint32_t memoryTypeBits_ = {}) VULKAN_HPP_NOEXCEPT
  36186. : memoryTypeBits( memoryTypeBits_ )
  36187. {}
  36188. VULKAN_HPP_CONSTEXPR MemoryWin32HandlePropertiesKHR( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36189. MemoryWin32HandlePropertiesKHR( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36190. : MemoryWin32HandlePropertiesKHR( *reinterpret_cast<MemoryWin32HandlePropertiesKHR const *>( &rhs ) )
  36191. {}
  36192. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36193. VULKAN_HPP_CONSTEXPR_14 MemoryWin32HandlePropertiesKHR & operator=( MemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36194. MemoryWin32HandlePropertiesKHR & operator=( VkMemoryWin32HandlePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36195. {
  36196. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR const *>( &rhs );
  36197. return *this;
  36198. }
  36199. operator VkMemoryWin32HandlePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
  36200. {
  36201. return *reinterpret_cast<const VkMemoryWin32HandlePropertiesKHR*>( this );
  36202. }
  36203. operator VkMemoryWin32HandlePropertiesKHR &() VULKAN_HPP_NOEXCEPT
  36204. {
  36205. return *reinterpret_cast<VkMemoryWin32HandlePropertiesKHR*>( this );
  36206. }
  36207. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  36208. auto operator<=>( MemoryWin32HandlePropertiesKHR const& ) const = default;
  36209. #else
  36210. bool operator==( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  36211. {
  36212. return ( sType == rhs.sType )
  36213. && ( pNext == rhs.pNext )
  36214. && ( memoryTypeBits == rhs.memoryTypeBits );
  36215. }
  36216. bool operator!=( MemoryWin32HandlePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  36217. {
  36218. return !operator==( rhs );
  36219. }
  36220. #endif
  36221. public:
  36222. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryWin32HandlePropertiesKHR;
  36223. void* pNext = {};
  36224. uint32_t memoryTypeBits = {};
  36225. };
  36226. static_assert( sizeof( MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ), "struct and wrapper have different size!" );
  36227. static_assert( std::is_standard_layout<MemoryWin32HandlePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
  36228. template <>
  36229. struct CppType<StructureType, StructureType::eMemoryWin32HandlePropertiesKHR>
  36230. {
  36231. using Type = MemoryWin32HandlePropertiesKHR;
  36232. };
  36233. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
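// Usage sketch (illustrative): `d3d11Handle` is an assumed HANDLE obtained from another API; opaque
// Win32 handle types may not be queried this way.
//
//   vk::MemoryWin32HandlePropertiesKHR handleProps = device.getMemoryWin32HandlePropertiesKHR(
//     vk::ExternalMemoryHandleTypeFlagBits::eD3D11Texture, d3d11Handle );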
  36234. struct PastPresentationTimingGOOGLE
  36235. {
  36236. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36237. VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE(uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {}, uint64_t actualPresentTime_ = {}, uint64_t earliestPresentTime_ = {}, uint64_t presentMargin_ = {}) VULKAN_HPP_NOEXCEPT
  36238. : presentID( presentID_ ), desiredPresentTime( desiredPresentTime_ ), actualPresentTime( actualPresentTime_ ), earliestPresentTime( earliestPresentTime_ ), presentMargin( presentMargin_ )
  36239. {}
  36240. VULKAN_HPP_CONSTEXPR PastPresentationTimingGOOGLE( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36241. PastPresentationTimingGOOGLE( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
  36242. : PastPresentationTimingGOOGLE( *reinterpret_cast<PastPresentationTimingGOOGLE const *>( &rhs ) )
  36243. {}
  36244. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36245. VULKAN_HPP_CONSTEXPR_14 PastPresentationTimingGOOGLE & operator=( PastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36246. PastPresentationTimingGOOGLE & operator=( VkPastPresentationTimingGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
  36247. {
  36248. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE const *>( &rhs );
  36249. return *this;
  36250. }
  36251. operator VkPastPresentationTimingGOOGLE const&() const VULKAN_HPP_NOEXCEPT
  36252. {
  36253. return *reinterpret_cast<const VkPastPresentationTimingGOOGLE*>( this );
  36254. }
  36255. operator VkPastPresentationTimingGOOGLE &() VULKAN_HPP_NOEXCEPT
  36256. {
  36257. return *reinterpret_cast<VkPastPresentationTimingGOOGLE*>( this );
  36258. }
  36259. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  36260. auto operator<=>( PastPresentationTimingGOOGLE const& ) const = default;
  36261. #else
  36262. bool operator==( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
  36263. {
  36264. return ( presentID == rhs.presentID )
  36265. && ( desiredPresentTime == rhs.desiredPresentTime )
  36266. && ( actualPresentTime == rhs.actualPresentTime )
  36267. && ( earliestPresentTime == rhs.earliestPresentTime )
  36268. && ( presentMargin == rhs.presentMargin );
  36269. }
  36270. bool operator!=( PastPresentationTimingGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
  36271. {
  36272. return !operator==( rhs );
  36273. }
  36274. #endif
  36275. public:
  36276. uint32_t presentID = {};
  36277. uint64_t desiredPresentTime = {};
  36278. uint64_t actualPresentTime = {};
  36279. uint64_t earliestPresentTime = {};
  36280. uint64_t presentMargin = {};
  36281. };
  36282. static_assert( sizeof( PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ), "struct and wrapper have different size!" );
  36283. static_assert( std::is_standard_layout<PastPresentationTimingGOOGLE>::value, "struct wrapper is not a standard layout!" );
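// Usage sketch (illustrative): VK_GOOGLE_display_timing returns one timing record per presented
// image. `device` and `swapchain` are assumed to be a valid vk::Device / vk::SwapchainKHR.
//
//   std::vector<vk::PastPresentationTimingGOOGLE> timings = device.getPastPresentationTimingGOOGLE( swapchain );
//   for ( auto const & t : timings )
//   {
//     // compare t.actualPresentTime against t.desiredPresentTime to tune future present requests
//   }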
  36284. union PerformanceValueDataINTEL
  36285. {
  36286. PerformanceValueDataINTEL( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const& rhs ) VULKAN_HPP_NOEXCEPT
  36287. {
  36288. memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) );
  36289. }
  36290. PerformanceValueDataINTEL( uint32_t value32_ = {} )
  36291. : value32( value32_ )
  36292. {}
  36293. PerformanceValueDataINTEL( uint64_t value64_ )
  36294. : value64( value64_ )
  36295. {}
  36296. PerformanceValueDataINTEL( float valueFloat_ )
  36297. : valueFloat( valueFloat_ )
  36298. {}
  36299. PerformanceValueDataINTEL( const char* valueString_ )
  36300. : valueString( valueString_ )
  36301. {}
  36302. PerformanceValueDataINTEL & setValue32( uint32_t value32_ ) VULKAN_HPP_NOEXCEPT
  36303. {
  36304. value32 = value32_;
  36305. return *this;
  36306. }
  36307. PerformanceValueDataINTEL & setValue64( uint64_t value64_ ) VULKAN_HPP_NOEXCEPT
  36308. {
  36309. value64 = value64_;
  36310. return *this;
  36311. }
  36312. PerformanceValueDataINTEL & setValueFloat( float valueFloat_ ) VULKAN_HPP_NOEXCEPT
  36313. {
  36314. valueFloat = valueFloat_;
  36315. return *this;
  36316. }
  36317. PerformanceValueDataINTEL & setValueBool( VULKAN_HPP_NAMESPACE::Bool32 valueBool_ ) VULKAN_HPP_NOEXCEPT
  36318. {
  36319. valueBool = valueBool_;
  36320. return *this;
  36321. }
  36322. PerformanceValueDataINTEL & setValueString( const char* valueString_ ) VULKAN_HPP_NOEXCEPT
  36323. {
  36324. valueString = valueString_;
  36325. return *this;
  36326. }
  36327. VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL & operator=( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
  36328. {
  36329. memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) );
  36330. return *this;
  36331. }
  36332. operator VkPerformanceValueDataINTEL const&() const
  36333. {
  36334. return *reinterpret_cast<const VkPerformanceValueDataINTEL*>(this);
  36335. }
  36336. operator VkPerformanceValueDataINTEL &()
  36337. {
  36338. return *reinterpret_cast<VkPerformanceValueDataINTEL*>(this);
  36339. }
  36340. #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
  36341. uint32_t value32;
  36342. uint64_t value64;
  36343. float valueFloat;
  36344. VULKAN_HPP_NAMESPACE::Bool32 valueBool;
  36345. const char* valueString;
  36346. #else
  36347. uint32_t value32;
  36348. uint64_t value64;
  36349. float valueFloat;
  36350. VkBool32 valueBool;
  36351. const char* valueString;
  36352. #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  36353. };
  36354. struct PerformanceValueINTEL
  36355. {
  36356. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36357. PerformanceValueINTEL(VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32, VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data_ = {}) VULKAN_HPP_NOEXCEPT
  36358. : type( type_ ), data( data_ )
  36359. {}
  36360. PerformanceValueINTEL( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36361. PerformanceValueINTEL( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
  36362. : PerformanceValueINTEL( *reinterpret_cast<PerformanceValueINTEL const *>( &rhs ) )
  36363. {}
  36364. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36365. PerformanceValueINTEL & operator=( PerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36366. PerformanceValueINTEL & operator=( VkPerformanceValueINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
  36367. {
  36368. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL const *>( &rhs );
  36369. return *this;
  36370. }
  36371. PerformanceValueINTEL & setType( VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type_ ) VULKAN_HPP_NOEXCEPT
  36372. {
  36373. type = type_;
  36374. return *this;
  36375. }
  36376. PerformanceValueINTEL & setData( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL const & data_ ) VULKAN_HPP_NOEXCEPT
  36377. {
  36378. data = data_;
  36379. return *this;
  36380. }
  36381. operator VkPerformanceValueINTEL const&() const VULKAN_HPP_NOEXCEPT
  36382. {
  36383. return *reinterpret_cast<const VkPerformanceValueINTEL*>( this );
  36384. }
  36385. operator VkPerformanceValueINTEL &() VULKAN_HPP_NOEXCEPT
  36386. {
  36387. return *reinterpret_cast<VkPerformanceValueINTEL*>( this );
  36388. }
  36389. public:
  36390. VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL type = VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL::eUint32;
  36391. VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL data = {};
  36392. };
  36393. static_assert( sizeof( PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ), "struct and wrapper have different size!" );
  36394. static_assert( std::is_standard_layout<PerformanceValueINTEL>::value, "struct wrapper is not a standard layout!" );
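// Usage sketch (illustrative): the `type` member selects which union member of `data` is valid.
// `device` is an assumed vk::Device with VK_INTEL_performance_query enabled.
//
//   vk::PerformanceValueINTEL value =
//     device.getPerformanceParameterINTEL( vk::PerformanceParameterTypeINTEL::eHwCountersSupported );
//   if ( value.type == vk::PerformanceValueTypeINTEL::eBool )
//   {
//     bool hwCountersSupported = ( value.data.valueBool == VK_TRUE );
//   }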
  36395. struct PipelineExecutableInfoKHR
  36396. {
  36397. static const bool allowDuplicate = false;
  36398. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInfoKHR;
  36399. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36400. VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}, uint32_t executableIndex_ = {}) VULKAN_HPP_NOEXCEPT
  36401. : pipeline( pipeline_ ), executableIndex( executableIndex_ )
  36402. {}
  36403. VULKAN_HPP_CONSTEXPR PipelineExecutableInfoKHR( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36404. PipelineExecutableInfoKHR( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36405. : PipelineExecutableInfoKHR( *reinterpret_cast<PipelineExecutableInfoKHR const *>( &rhs ) )
  36406. {}
  36407. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36408. VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInfoKHR & operator=( PipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36409. PipelineExecutableInfoKHR & operator=( VkPipelineExecutableInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36410. {
  36411. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR const *>( &rhs );
  36412. return *this;
  36413. }
  36414. PipelineExecutableInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  36415. {
  36416. pNext = pNext_;
  36417. return *this;
  36418. }
  36419. PipelineExecutableInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
  36420. {
  36421. pipeline = pipeline_;
  36422. return *this;
  36423. }
  36424. PipelineExecutableInfoKHR & setExecutableIndex( uint32_t executableIndex_ ) VULKAN_HPP_NOEXCEPT
  36425. {
  36426. executableIndex = executableIndex_;
  36427. return *this;
  36428. }
  36429. operator VkPipelineExecutableInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  36430. {
  36431. return *reinterpret_cast<const VkPipelineExecutableInfoKHR*>( this );
  36432. }
  36433. operator VkPipelineExecutableInfoKHR &() VULKAN_HPP_NOEXCEPT
  36434. {
  36435. return *reinterpret_cast<VkPipelineExecutableInfoKHR*>( this );
  36436. }
  36437. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  36438. auto operator<=>( PipelineExecutableInfoKHR const& ) const = default;
  36439. #else
  36440. bool operator==( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  36441. {
  36442. return ( sType == rhs.sType )
  36443. && ( pNext == rhs.pNext )
  36444. && ( pipeline == rhs.pipeline )
  36445. && ( executableIndex == rhs.executableIndex );
  36446. }
  36447. bool operator!=( PipelineExecutableInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  36448. {
  36449. return !operator==( rhs );
  36450. }
  36451. #endif
  36452. public:
  36453. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInfoKHR;
  36454. const void* pNext = {};
  36455. VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
  36456. uint32_t executableIndex = {};
  36457. };
  36458. static_assert( sizeof( PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ), "struct and wrapper have different size!" );
  36459. static_assert( std::is_standard_layout<PipelineExecutableInfoKHR>::value, "struct wrapper is not a standard layout!" );
  36460. template <>
  36461. struct CppType<StructureType, StructureType::ePipelineExecutableInfoKHR>
  36462. {
  36463. using Type = PipelineExecutableInfoKHR;
  36464. };
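// Usage sketch (illustrative): PipelineExecutableInfoKHR names one executable (by index) of a
// pipeline created with PipelineCreateFlagBits::eCaptureStatisticsKHR and/or
// eCaptureInternalRepresentationsKHR; it feeds the statistics and internal-representation queries
// shown further below. `pipeline` is an assumed vk::Pipeline.
//
//   vk::PipelineExecutableInfoKHR execInfo( pipeline, 0 /*executableIndex*/ );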
  36465. struct PipelineExecutableInternalRepresentationKHR
  36466. {
  36467. static const bool allowDuplicate = false;
  36468. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableInternalRepresentationKHR;
  36469. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36470. VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR(std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {}, VULKAN_HPP_NAMESPACE::Bool32 isText_ = {}, size_t dataSize_ = {}, void* pData_ = {}) VULKAN_HPP_NOEXCEPT
  36471. : name( name_ ), description( description_ ), isText( isText_ ), dataSize( dataSize_ ), pData( pData_ )
  36472. {}
  36473. VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36474. PipelineExecutableInternalRepresentationKHR( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36475. : PipelineExecutableInternalRepresentationKHR( *reinterpret_cast<PipelineExecutableInternalRepresentationKHR const *>( &rhs ) )
  36476. {}
  36477. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  36478. template <typename T>
  36479. PipelineExecutableInternalRepresentationKHR( std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_, VULKAN_HPP_NAMESPACE::Bool32 isText_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<T> const & data_ )
  36480. : name( name_ ), description( description_ ), isText( isText_ ), dataSize( data_.size() * sizeof(T) ), pData( data_.data() )
  36481. {}
  36482. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  36483. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36484. VULKAN_HPP_CONSTEXPR_14 PipelineExecutableInternalRepresentationKHR & operator=( PipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36485. PipelineExecutableInternalRepresentationKHR & operator=( VkPipelineExecutableInternalRepresentationKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36486. {
  36487. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR const *>( &rhs );
  36488. return *this;
  36489. }
  36490. operator VkPipelineExecutableInternalRepresentationKHR const&() const VULKAN_HPP_NOEXCEPT
  36491. {
  36492. return *reinterpret_cast<const VkPipelineExecutableInternalRepresentationKHR*>( this );
  36493. }
  36494. operator VkPipelineExecutableInternalRepresentationKHR &() VULKAN_HPP_NOEXCEPT
  36495. {
  36496. return *reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR*>( this );
  36497. }
  36498. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  36499. auto operator<=>( PipelineExecutableInternalRepresentationKHR const& ) const = default;
  36500. #else
  36501. bool operator==( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  36502. {
  36503. return ( sType == rhs.sType )
  36504. && ( pNext == rhs.pNext )
  36505. && ( name == rhs.name )
  36506. && ( description == rhs.description )
  36507. && ( isText == rhs.isText )
  36508. && ( dataSize == rhs.dataSize )
  36509. && ( pData == rhs.pData );
  36510. }
  36511. bool operator!=( PipelineExecutableInternalRepresentationKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  36512. {
  36513. return !operator==( rhs );
  36514. }
  36515. #endif
  36516. public:
  36517. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableInternalRepresentationKHR;
  36518. void* pNext = {};
  36519. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
  36520. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
  36521. VULKAN_HPP_NAMESPACE::Bool32 isText = {};
  36522. size_t dataSize = {};
  36523. void* pData = {};
  36524. };
  36525. static_assert( sizeof( PipelineExecutableInternalRepresentationKHR ) == sizeof( VkPipelineExecutableInternalRepresentationKHR ), "struct and wrapper have different size!" );
  36526. static_assert( std::is_standard_layout<PipelineExecutableInternalRepresentationKHR>::value, "struct wrapper is not a standard layout!" );
  36527. template <>
  36528. struct CppType<StructureType, StructureType::ePipelineExecutableInternalRepresentationKHR>
  36529. {
  36530. using Type = PipelineExecutableInternalRepresentationKHR;
  36531. };
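// Usage sketch (illustrative): assuming `execInfo` from the sketch above, the enhanced-mode wrapper
// returns one element per internal representation; name, isText and dataSize/pData describe each blob.
//
//   std::vector<vk::PipelineExecutableInternalRepresentationKHR> reps =
//     device.getPipelineExecutableInternalRepresentationsKHR( execInfo );
//   for ( auto const & r : reps )
//   {
//     // r.name.data(), r.isText and r.dataSize describe the representation
//   }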
  36532. struct PipelineInfoKHR
  36533. {
  36534. static const bool allowDuplicate = false;
  36535. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineInfoKHR;
  36536. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36537. VULKAN_HPP_CONSTEXPR PipelineInfoKHR(VULKAN_HPP_NAMESPACE::Pipeline pipeline_ = {}) VULKAN_HPP_NOEXCEPT
  36538. : pipeline( pipeline_ )
  36539. {}
  36540. VULKAN_HPP_CONSTEXPR PipelineInfoKHR( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36541. PipelineInfoKHR( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36542. : PipelineInfoKHR( *reinterpret_cast<PipelineInfoKHR const *>( &rhs ) )
  36543. {}
  36544. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36545. VULKAN_HPP_CONSTEXPR_14 PipelineInfoKHR & operator=( PipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36546. PipelineInfoKHR & operator=( VkPipelineInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36547. {
  36548. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineInfoKHR const *>( &rhs );
  36549. return *this;
  36550. }
  36551. PipelineInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  36552. {
  36553. pNext = pNext_;
  36554. return *this;
  36555. }
  36556. PipelineInfoKHR & setPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline_ ) VULKAN_HPP_NOEXCEPT
  36557. {
  36558. pipeline = pipeline_;
  36559. return *this;
  36560. }
  36561. operator VkPipelineInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  36562. {
  36563. return *reinterpret_cast<const VkPipelineInfoKHR*>( this );
  36564. }
  36565. operator VkPipelineInfoKHR &() VULKAN_HPP_NOEXCEPT
  36566. {
  36567. return *reinterpret_cast<VkPipelineInfoKHR*>( this );
  36568. }
  36569. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  36570. auto operator<=>( PipelineInfoKHR const& ) const = default;
  36571. #else
  36572. bool operator==( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  36573. {
  36574. return ( sType == rhs.sType )
  36575. && ( pNext == rhs.pNext )
  36576. && ( pipeline == rhs.pipeline );
  36577. }
  36578. bool operator!=( PipelineInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  36579. {
  36580. return !operator==( rhs );
  36581. }
  36582. #endif
  36583. public:
  36584. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineInfoKHR;
  36585. const void* pNext = {};
  36586. VULKAN_HPP_NAMESPACE::Pipeline pipeline = {};
  36587. };
  36588. static_assert( sizeof( PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" );
  36589. static_assert( std::is_standard_layout<PipelineInfoKHR>::value, "struct wrapper is not a standard layout!" );
  36590. template <>
  36591. struct CppType<StructureType, StructureType::ePipelineInfoKHR>
  36592. {
  36593. using Type = PipelineInfoKHR;
  36594. };
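// Usage sketch (illustrative): enumerating the executables of a pipeline created with the
// executable-properties capture flags. `device` and `pipeline` are assumed names.
//
//   std::vector<vk::PipelineExecutablePropertiesKHR> props =
//     device.getPipelineExecutablePropertiesKHR( vk::PipelineInfoKHR( pipeline ) );
//   for ( auto const & p : props )
//   {
//     // p.name.data(), p.stages and p.subgroupSize describe each executable
//   }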
  36595. struct PipelineExecutablePropertiesKHR
  36596. {
  36597. static const bool allowDuplicate = false;
  36598. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutablePropertiesKHR;
  36599. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36600. VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR(VULKAN_HPP_NAMESPACE::ShaderStageFlags stages_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {}, uint32_t subgroupSize_ = {}) VULKAN_HPP_NOEXCEPT
  36601. : stages( stages_ ), name( name_ ), description( description_ ), subgroupSize( subgroupSize_ )
  36602. {}
  36603. VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36604. PipelineExecutablePropertiesKHR( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36605. : PipelineExecutablePropertiesKHR( *reinterpret_cast<PipelineExecutablePropertiesKHR const *>( &rhs ) )
  36606. {}
  36607. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36608. VULKAN_HPP_CONSTEXPR_14 PipelineExecutablePropertiesKHR & operator=( PipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36609. PipelineExecutablePropertiesKHR & operator=( VkPipelineExecutablePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36610. {
  36611. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR const *>( &rhs );
  36612. return *this;
  36613. }
  36614. operator VkPipelineExecutablePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
  36615. {
  36616. return *reinterpret_cast<const VkPipelineExecutablePropertiesKHR*>( this );
  36617. }
  36618. operator VkPipelineExecutablePropertiesKHR &() VULKAN_HPP_NOEXCEPT
  36619. {
  36620. return *reinterpret_cast<VkPipelineExecutablePropertiesKHR*>( this );
  36621. }
  36622. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  36623. auto operator<=>( PipelineExecutablePropertiesKHR const& ) const = default;
  36624. #else
  36625. bool operator==( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  36626. {
  36627. return ( sType == rhs.sType )
  36628. && ( pNext == rhs.pNext )
  36629. && ( stages == rhs.stages )
  36630. && ( name == rhs.name )
  36631. && ( description == rhs.description )
  36632. && ( subgroupSize == rhs.subgroupSize );
  36633. }
  36634. bool operator!=( PipelineExecutablePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  36635. {
  36636. return !operator==( rhs );
  36637. }
  36638. #endif
  36639. public:
  36640. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutablePropertiesKHR;
  36641. void* pNext = {};
  36642. VULKAN_HPP_NAMESPACE::ShaderStageFlags stages = {};
  36643. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
  36644. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
  36645. uint32_t subgroupSize = {};
  36646. };
  36647. static_assert( sizeof( PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ), "struct and wrapper have different size!" );
  36648. static_assert( std::is_standard_layout<PipelineExecutablePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
  36649. template <>
  36650. struct CppType<StructureType, StructureType::ePipelineExecutablePropertiesKHR>
  36651. {
  36652. using Type = PipelineExecutablePropertiesKHR;
  36653. };
  36654. union PipelineExecutableStatisticValueKHR
  36655. {
  36656. PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const& rhs ) VULKAN_HPP_NOEXCEPT
  36657. {
  36658. memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) );
  36659. }
  36660. PipelineExecutableStatisticValueKHR( VULKAN_HPP_NAMESPACE::Bool32 b32_ = {} )
  36661. : b32( b32_ )
  36662. {}
  36663. PipelineExecutableStatisticValueKHR( int64_t i64_ )
  36664. : i64( i64_ )
  36665. {}
  36666. PipelineExecutableStatisticValueKHR( uint64_t u64_ )
  36667. : u64( u64_ )
  36668. {}
  36669. PipelineExecutableStatisticValueKHR( double f64_ )
  36670. : f64( f64_ )
  36671. {}
  36672. PipelineExecutableStatisticValueKHR & setB32( VULKAN_HPP_NAMESPACE::Bool32 b32_ ) VULKAN_HPP_NOEXCEPT
  36673. {
  36674. b32 = b32_;
  36675. return *this;
  36676. }
  36677. PipelineExecutableStatisticValueKHR & setI64( int64_t i64_ ) VULKAN_HPP_NOEXCEPT
  36678. {
  36679. i64 = i64_;
  36680. return *this;
  36681. }
  36682. PipelineExecutableStatisticValueKHR & setU64( uint64_t u64_ ) VULKAN_HPP_NOEXCEPT
  36683. {
  36684. u64 = u64_;
  36685. return *this;
  36686. }
  36687. PipelineExecutableStatisticValueKHR & setF64( double f64_ ) VULKAN_HPP_NOEXCEPT
  36688. {
  36689. f64 = f64_;
  36690. return *this;
  36691. }
  36692. VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR & operator=( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36693. {
  36694. memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) );
  36695. return *this;
  36696. }
  36697. operator VkPipelineExecutableStatisticValueKHR const&() const
  36698. {
  36699. return *reinterpret_cast<const VkPipelineExecutableStatisticValueKHR*>(this);
  36700. }
  36701. operator VkPipelineExecutableStatisticValueKHR &()
  36702. {
  36703. return *reinterpret_cast<VkPipelineExecutableStatisticValueKHR*>(this);
  36704. }
  36705. #ifdef VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
  36706. VULKAN_HPP_NAMESPACE::Bool32 b32;
  36707. int64_t i64;
  36708. uint64_t u64;
  36709. double f64;
  36710. #else
  36711. VkBool32 b32;
  36712. int64_t i64;
  36713. uint64_t u64;
  36714. double f64;
  36715. #endif /*VULKAN_HPP_HAS_UNRESTRICTED_UNIONS*/
  36716. };
  36717. struct PipelineExecutableStatisticKHR
  36718. {
  36719. static const bool allowDuplicate = false;
  36720. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineExecutableStatisticKHR;
  36721. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36722. PipelineExecutableStatisticKHR(std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {}, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format_ = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value_ = {}) VULKAN_HPP_NOEXCEPT
  36723. : name( name_ ), description( description_ ), format( format_ ), value( value_ )
  36724. {}
  36725. PipelineExecutableStatisticKHR( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36726. PipelineExecutableStatisticKHR( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36727. : PipelineExecutableStatisticKHR( *reinterpret_cast<PipelineExecutableStatisticKHR const *>( &rhs ) )
  36728. {}
  36729. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36730. PipelineExecutableStatisticKHR & operator=( PipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36731. PipelineExecutableStatisticKHR & operator=( VkPipelineExecutableStatisticKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36732. {
  36733. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR const *>( &rhs );
  36734. return *this;
  36735. }
  36736. operator VkPipelineExecutableStatisticKHR const&() const VULKAN_HPP_NOEXCEPT
  36737. {
  36738. return *reinterpret_cast<const VkPipelineExecutableStatisticKHR*>( this );
  36739. }
  36740. operator VkPipelineExecutableStatisticKHR &() VULKAN_HPP_NOEXCEPT
  36741. {
  36742. return *reinterpret_cast<VkPipelineExecutableStatisticKHR*>( this );
  36743. }
  36744. public:
  36745. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineExecutableStatisticKHR;
  36746. void* pNext = {};
  36747. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
  36748. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
  36749. VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR format = VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR::eBool32;
  36750. VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR value = {};
  36751. };
  36752. static_assert( sizeof( PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ), "struct and wrapper have different size!" );
  36753. static_assert( std::is_standard_layout<PipelineExecutableStatisticKHR>::value, "struct wrapper is not a standard layout!" );
  36754. template <>
  36755. struct CppType<StructureType, StructureType::ePipelineExecutableStatisticKHR>
  36756. {
  36757. using Type = PipelineExecutableStatisticKHR;
  36758. };
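// Usage sketch (illustrative): each statistic's `format` tag selects the active member of the value
// union above. Assuming `execInfo` as sketched earlier:
//
//   std::vector<vk::PipelineExecutableStatisticKHR> stats =
//     device.getPipelineExecutableStatisticsKHR( execInfo );
//   for ( auto const & s : stats )
//   {
//     if ( s.format == vk::PipelineExecutableStatisticFormatKHR::eUint64 )
//     {
//       uint64_t v = s.value.u64;
//     }
//   }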
  36759. struct RefreshCycleDurationGOOGLE
  36760. {
  36761. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36762. VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE(uint64_t refreshDuration_ = {}) VULKAN_HPP_NOEXCEPT
  36763. : refreshDuration( refreshDuration_ )
  36764. {}
  36765. VULKAN_HPP_CONSTEXPR RefreshCycleDurationGOOGLE( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36766. RefreshCycleDurationGOOGLE( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
  36767. : RefreshCycleDurationGOOGLE( *reinterpret_cast<RefreshCycleDurationGOOGLE const *>( &rhs ) )
  36768. {}
  36769. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36770. VULKAN_HPP_CONSTEXPR_14 RefreshCycleDurationGOOGLE & operator=( RefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36771. RefreshCycleDurationGOOGLE & operator=( VkRefreshCycleDurationGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
  36772. {
  36773. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE const *>( &rhs );
  36774. return *this;
  36775. }
  36776. operator VkRefreshCycleDurationGOOGLE const&() const VULKAN_HPP_NOEXCEPT
  36777. {
  36778. return *reinterpret_cast<const VkRefreshCycleDurationGOOGLE*>( this );
  36779. }
  36780. operator VkRefreshCycleDurationGOOGLE &() VULKAN_HPP_NOEXCEPT
  36781. {
  36782. return *reinterpret_cast<VkRefreshCycleDurationGOOGLE*>( this );
  36783. }
  36784. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  36785. auto operator<=>( RefreshCycleDurationGOOGLE const& ) const = default;
  36786. #else
  36787. bool operator==( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
  36788. {
  36789. return ( refreshDuration == rhs.refreshDuration );
  36790. }
  36791. bool operator!=( RefreshCycleDurationGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
  36792. {
  36793. return !operator==( rhs );
  36794. }
  36795. #endif
  36796. public:
  36797. uint64_t refreshDuration = {};
  36798. };
  36799. static_assert( sizeof( RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ), "struct and wrapper have different size!" );
  36800. static_assert( std::is_standard_layout<RefreshCycleDurationGOOGLE>::value, "struct wrapper is not a standard layout!" );
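// Usage sketch (illustrative): the display's refresh period in nanoseconds, used together with the
// PastPresentationTimingGOOGLE feedback above. `swapchain` is an assumed vk::SwapchainKHR.
//
//   vk::RefreshCycleDurationGOOGLE cycle = device.getRefreshCycleDurationGOOGLE( swapchain );
//   uint64_t refreshNs = cycle.refreshDuration;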
  36801. struct SemaphoreGetFdInfoKHR
  36802. {
  36803. static const bool allowDuplicate = false;
  36804. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetFdInfoKHR;
  36805. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36806. VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
  36807. : semaphore( semaphore_ ), handleType( handleType_ )
  36808. {}
  36809. VULKAN_HPP_CONSTEXPR SemaphoreGetFdInfoKHR( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36810. SemaphoreGetFdInfoKHR( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36811. : SemaphoreGetFdInfoKHR( *reinterpret_cast<SemaphoreGetFdInfoKHR const *>( &rhs ) )
  36812. {}
  36813. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36814. VULKAN_HPP_CONSTEXPR_14 SemaphoreGetFdInfoKHR & operator=( SemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36815. SemaphoreGetFdInfoKHR & operator=( VkSemaphoreGetFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36816. {
  36817. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR const *>( &rhs );
  36818. return *this;
  36819. }
  36820. SemaphoreGetFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  36821. {
  36822. pNext = pNext_;
  36823. return *this;
  36824. }
  36825. SemaphoreGetFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
  36826. {
  36827. semaphore = semaphore_;
  36828. return *this;
  36829. }
  36830. SemaphoreGetFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
  36831. {
  36832. handleType = handleType_;
  36833. return *this;
  36834. }
  36835. operator VkSemaphoreGetFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  36836. {
  36837. return *reinterpret_cast<const VkSemaphoreGetFdInfoKHR*>( this );
  36838. }
  36839. operator VkSemaphoreGetFdInfoKHR &() VULKAN_HPP_NOEXCEPT
  36840. {
  36841. return *reinterpret_cast<VkSemaphoreGetFdInfoKHR*>( this );
  36842. }
  36843. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  36844. auto operator<=>( SemaphoreGetFdInfoKHR const& ) const = default;
  36845. #else
  36846. bool operator==( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  36847. {
  36848. return ( sType == rhs.sType )
  36849. && ( pNext == rhs.pNext )
  36850. && ( semaphore == rhs.semaphore )
  36851. && ( handleType == rhs.handleType );
  36852. }
  36853. bool operator!=( SemaphoreGetFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  36854. {
  36855. return !operator==( rhs );
  36856. }
  36857. #endif
  36858. public:
  36859. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetFdInfoKHR;
  36860. const void* pNext = {};
  36861. VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
  36862. VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
  36863. };
  36864. static_assert( sizeof( SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ), "struct and wrapper have different size!" );
  36865. static_assert( std::is_standard_layout<SemaphoreGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
  36866. template <>
  36867. struct CppType<StructureType, StructureType::eSemaphoreGetFdInfoKHR>
  36868. {
  36869. using Type = SemaphoreGetFdInfoKHR;
  36870. };
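// Usage sketch (illustrative): exporting a semaphore payload as a POSIX fd, mirroring the memory fd
// export above. `exportableSemaphore` is an assumed vk::Semaphore created as exportable.
//
//   vk::SemaphoreGetFdInfoKHR getFdInfo( exportableSemaphore,
//                                        vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
//   int semaphoreFd = device.getSemaphoreFdKHR( getFdInfo );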
  36871. #ifdef VK_USE_PLATFORM_WIN32_KHR
  36872. struct SemaphoreGetWin32HandleInfoKHR
  36873. {
  36874. static const bool allowDuplicate = false;
  36875. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
  36876. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36877. VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
  36878. : semaphore( semaphore_ ), handleType( handleType_ )
  36879. {}
  36880. VULKAN_HPP_CONSTEXPR SemaphoreGetWin32HandleInfoKHR( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36881. SemaphoreGetWin32HandleInfoKHR( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36882. : SemaphoreGetWin32HandleInfoKHR( *reinterpret_cast<SemaphoreGetWin32HandleInfoKHR const *>( &rhs ) )
  36883. {}
  36884. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36885. VULKAN_HPP_CONSTEXPR_14 SemaphoreGetWin32HandleInfoKHR & operator=( SemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36886. SemaphoreGetWin32HandleInfoKHR & operator=( VkSemaphoreGetWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36887. {
  36888. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR const *>( &rhs );
  36889. return *this;
  36890. }
  36891. SemaphoreGetWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  36892. {
  36893. pNext = pNext_;
  36894. return *this;
  36895. }
  36896. SemaphoreGetWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
  36897. {
  36898. semaphore = semaphore_;
  36899. return *this;
  36900. }
  36901. SemaphoreGetWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
  36902. {
  36903. handleType = handleType_;
  36904. return *this;
  36905. }
  36906. operator VkSemaphoreGetWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  36907. {
  36908. return *reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR*>( this );
  36909. }
  36910. operator VkSemaphoreGetWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
  36911. {
  36912. return *reinterpret_cast<VkSemaphoreGetWin32HandleInfoKHR*>( this );
  36913. }
  36914. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  36915. auto operator<=>( SemaphoreGetWin32HandleInfoKHR const& ) const = default;
  36916. #else
  36917. bool operator==( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  36918. {
  36919. return ( sType == rhs.sType )
  36920. && ( pNext == rhs.pNext )
  36921. && ( semaphore == rhs.semaphore )
  36922. && ( handleType == rhs.handleType );
  36923. }
  36924. bool operator!=( SemaphoreGetWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  36925. {
  36926. return !operator==( rhs );
  36927. }
  36928. #endif
  36929. public:
  36930. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreGetWin32HandleInfoKHR;
  36931. const void* pNext = {};
  36932. VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
  36933. VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
  36934. };
  36935. static_assert( sizeof( SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ), "struct and wrapper have different size!" );
  36936. static_assert( std::is_standard_layout<SemaphoreGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
  36937. template <>
  36938. struct CppType<StructureType, StructureType::eSemaphoreGetWin32HandleInfoKHR>
  36939. {
  36940. using Type = SemaphoreGetWin32HandleInfoKHR;
  36941. };
  36942. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
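// Usage sketch (illustrative): the Win32 analogue; as with memory, pass a Win32 handle type rather
// than the struct's eOpaqueFd default. `exportableSemaphore` is an assumed vk::Semaphore.
//
//   vk::SemaphoreGetWin32HandleInfoKHR getHandleInfo( exportableSemaphore,
//                                                     vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 );
//   HANDLE semaphoreHandle = device.getSemaphoreWin32HandleKHR( getHandleInfo );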
  36943. struct ImportFenceFdInfoKHR
  36944. {
  36945. static const bool allowDuplicate = false;
  36946. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceFdInfoKHR;
  36947. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36948. VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, int fd_ = {}) VULKAN_HPP_NOEXCEPT
  36949. : fence( fence_ ), flags( flags_ ), handleType( handleType_ ), fd( fd_ )
  36950. {}
  36951. VULKAN_HPP_CONSTEXPR ImportFenceFdInfoKHR( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36952. ImportFenceFdInfoKHR( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36953. : ImportFenceFdInfoKHR( *reinterpret_cast<ImportFenceFdInfoKHR const *>( &rhs ) )
  36954. {}
  36955. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  36956. VULKAN_HPP_CONSTEXPR_14 ImportFenceFdInfoKHR & operator=( ImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  36957. ImportFenceFdInfoKHR & operator=( VkImportFenceFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  36958. {
  36959. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR const *>( &rhs );
  36960. return *this;
  36961. }
  36962. ImportFenceFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  36963. {
  36964. pNext = pNext_;
  36965. return *this;
  36966. }
  36967. ImportFenceFdInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
  36968. {
  36969. fence = fence_;
  36970. return *this;
  36971. }
  36972. ImportFenceFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
  36973. {
  36974. flags = flags_;
  36975. return *this;
  36976. }
  36977. ImportFenceFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
  36978. {
  36979. handleType = handleType_;
  36980. return *this;
  36981. }
  36982. ImportFenceFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
  36983. {
  36984. fd = fd_;
  36985. return *this;
  36986. }
  36987. operator VkImportFenceFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  36988. {
  36989. return *reinterpret_cast<const VkImportFenceFdInfoKHR*>( this );
  36990. }
  36991. operator VkImportFenceFdInfoKHR &() VULKAN_HPP_NOEXCEPT
  36992. {
  36993. return *reinterpret_cast<VkImportFenceFdInfoKHR*>( this );
  36994. }
  36995. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  36996. auto operator<=>( ImportFenceFdInfoKHR const& ) const = default;
  36997. #else
  36998. bool operator==( ImportFenceFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  36999. {
  37000. return ( sType == rhs.sType )
  37001. && ( pNext == rhs.pNext )
  37002. && ( fence == rhs.fence )
  37003. && ( flags == rhs.flags )
  37004. && ( handleType == rhs.handleType )
  37005. && ( fd == rhs.fd );
  37006. }
  37007. bool operator!=( ImportFenceFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  37008. {
  37009. return !operator==( rhs );
  37010. }
  37011. #endif
  37012. public:
  37013. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceFdInfoKHR;
  37014. const void* pNext = {};
  37015. VULKAN_HPP_NAMESPACE::Fence fence = {};
  37016. VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
  37017. VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
  37018. int fd = {};
  37019. };
  37020. static_assert( sizeof( ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" );
  37021. static_assert( std::is_standard_layout<ImportFenceFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
  37022. template <>
  37023. struct CppType<StructureType, StructureType::eImportFenceFdInfoKHR>
  37024. {
  37025. using Type = ImportFenceFdInfoKHR;
  37026. };
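// Illustrative sketch (not part of the generated header): importing a POSIX file descriptor as a
// fence payload with VK_KHR_external_fence_fd might look roughly like this, assuming `device` is a
// valid vk::Device, `fence` supports external-fence handles, and `fd` was produced by another
// device or process:
//
//   vk::ImportFenceFdInfoKHR importInfo;
//   importInfo.setFence( fence )
//             .setFlags( vk::FenceImportFlagBits::eTemporary )
//             .setHandleType( vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd )
//             .setFd( fd );
//   device.importFenceFdKHR( importInfo );   // ownership of fd passes to the implementation
//
// The Win32 variants below follow the same pattern with a HANDLE / LPCWSTR pair instead of an fd.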
  37027. #ifdef VK_USE_PLATFORM_WIN32_KHR
  37028. struct ImportFenceWin32HandleInfoKHR
  37029. {
  37030. static const bool allowDuplicate = false;
  37031. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportFenceWin32HandleInfoKHR;
  37032. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  37033. VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Fence fence_ = {}, VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
  37034. : fence( fence_ ), flags( flags_ ), handleType( handleType_ ), handle( handle_ ), name( name_ )
  37035. {}
  37036. VULKAN_HPP_CONSTEXPR ImportFenceWin32HandleInfoKHR( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  37037. ImportFenceWin32HandleInfoKHR( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  37038. : ImportFenceWin32HandleInfoKHR( *reinterpret_cast<ImportFenceWin32HandleInfoKHR const *>( &rhs ) )
  37039. {}
  37040. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  37041. VULKAN_HPP_CONSTEXPR_14 ImportFenceWin32HandleInfoKHR & operator=( ImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  37042. ImportFenceWin32HandleInfoKHR & operator=( VkImportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  37043. {
  37044. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR const *>( &rhs );
  37045. return *this;
  37046. }
  37047. ImportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  37048. {
  37049. pNext = pNext_;
  37050. return *this;
  37051. }
  37052. ImportFenceWin32HandleInfoKHR & setFence( VULKAN_HPP_NAMESPACE::Fence fence_ ) VULKAN_HPP_NOEXCEPT
  37053. {
  37054. fence = fence_;
  37055. return *this;
  37056. }
  37057. ImportFenceWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::FenceImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
  37058. {
  37059. flags = flags_;
  37060. return *this;
  37061. }
  37062. ImportFenceWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
  37063. {
  37064. handleType = handleType_;
  37065. return *this;
  37066. }
  37067. ImportFenceWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
  37068. {
  37069. handle = handle_;
  37070. return *this;
  37071. }
  37072. ImportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
  37073. {
  37074. name = name_;
  37075. return *this;
  37076. }
  37077. operator VkImportFenceWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  37078. {
  37079. return *reinterpret_cast<const VkImportFenceWin32HandleInfoKHR*>( this );
  37080. }
  37081. operator VkImportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
  37082. {
  37083. return *reinterpret_cast<VkImportFenceWin32HandleInfoKHR*>( this );
  37084. }
  37085. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  37086. auto operator<=>( ImportFenceWin32HandleInfoKHR const& ) const = default;
  37087. #else
  37088. bool operator==( ImportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  37089. {
  37090. return ( sType == rhs.sType )
  37091. && ( pNext == rhs.pNext )
  37092. && ( fence == rhs.fence )
  37093. && ( flags == rhs.flags )
  37094. && ( handleType == rhs.handleType )
  37095. && ( handle == rhs.handle )
  37096. && ( name == rhs.name );
  37097. }
  37098. bool operator!=( ImportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  37099. {
  37100. return !operator==( rhs );
  37101. }
  37102. #endif
  37103. public:
  37104. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportFenceWin32HandleInfoKHR;
  37105. const void* pNext = {};
  37106. VULKAN_HPP_NAMESPACE::Fence fence = {};
  37107. VULKAN_HPP_NAMESPACE::FenceImportFlags flags = {};
  37108. VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
  37109. HANDLE handle = {};
  37110. LPCWSTR name = {};
  37111. };
  37112. static_assert( sizeof( ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
  37113. static_assert( std::is_standard_layout<ImportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
  37114. template <>
  37115. struct CppType<StructureType, StructureType::eImportFenceWin32HandleInfoKHR>
  37116. {
  37117. using Type = ImportFenceWin32HandleInfoKHR;
  37118. };
  37119. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  37120. struct ImportSemaphoreFdInfoKHR
  37121. {
  37122. static const bool allowDuplicate = false;
  37123. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreFdInfoKHR;
  37124. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  37125. VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, int fd_ = {}) VULKAN_HPP_NOEXCEPT
  37126. : semaphore( semaphore_ ), flags( flags_ ), handleType( handleType_ ), fd( fd_ )
  37127. {}
  37128. VULKAN_HPP_CONSTEXPR ImportSemaphoreFdInfoKHR( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  37129. ImportSemaphoreFdInfoKHR( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  37130. : ImportSemaphoreFdInfoKHR( *reinterpret_cast<ImportSemaphoreFdInfoKHR const *>( &rhs ) )
  37131. {}
  37132. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  37133. VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreFdInfoKHR & operator=( ImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  37134. ImportSemaphoreFdInfoKHR & operator=( VkImportSemaphoreFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  37135. {
  37136. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR const *>( &rhs );
  37137. return *this;
  37138. }
  37139. ImportSemaphoreFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  37140. {
  37141. pNext = pNext_;
  37142. return *this;
  37143. }
  37144. ImportSemaphoreFdInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
  37145. {
  37146. semaphore = semaphore_;
  37147. return *this;
  37148. }
  37149. ImportSemaphoreFdInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
  37150. {
  37151. flags = flags_;
  37152. return *this;
  37153. }
  37154. ImportSemaphoreFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
  37155. {
  37156. handleType = handleType_;
  37157. return *this;
  37158. }
  37159. ImportSemaphoreFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
  37160. {
  37161. fd = fd_;
  37162. return *this;
  37163. }
  37164. operator VkImportSemaphoreFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  37165. {
  37166. return *reinterpret_cast<const VkImportSemaphoreFdInfoKHR*>( this );
  37167. }
  37168. operator VkImportSemaphoreFdInfoKHR &() VULKAN_HPP_NOEXCEPT
  37169. {
  37170. return *reinterpret_cast<VkImportSemaphoreFdInfoKHR*>( this );
  37171. }
  37172. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  37173. auto operator<=>( ImportSemaphoreFdInfoKHR const& ) const = default;
  37174. #else
  37175. bool operator==( ImportSemaphoreFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  37176. {
  37177. return ( sType == rhs.sType )
  37178. && ( pNext == rhs.pNext )
  37179. && ( semaphore == rhs.semaphore )
  37180. && ( flags == rhs.flags )
  37181. && ( handleType == rhs.handleType )
  37182. && ( fd == rhs.fd );
  37183. }
  37184. bool operator!=( ImportSemaphoreFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  37185. {
  37186. return !operator==( rhs );
  37187. }
  37188. #endif
  37189. public:
  37190. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreFdInfoKHR;
  37191. const void* pNext = {};
  37192. VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
  37193. VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
  37194. VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
  37195. int fd = {};
  37196. };
  37197. static_assert( sizeof( ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ), "struct and wrapper have different size!" );
  37198. static_assert( std::is_standard_layout<ImportSemaphoreFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
  37199. template <>
  37200. struct CppType<StructureType, StructureType::eImportSemaphoreFdInfoKHR>
  37201. {
  37202. using Type = ImportSemaphoreFdInfoKHR;
  37203. };
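// Illustrative sketch only: the semaphore counterpart mirrors the fence case, here using the
// constructor rather than the setter chain. Assumes `device`, `semaphore`, and an externally
// produced `fd`:
//
//   vk::ImportSemaphoreFdInfoKHR importInfo( semaphore,
//                                            vk::SemaphoreImportFlagBits::eTemporary,
//                                            vk::ExternalSemaphoreHandleTypeFlagBits::eSyncFd,
//                                            fd );
//   device.importSemaphoreFdKHR( importInfo );
//
// Sync-fd payloads have copy transference, so the spec requires the temporary import flag for them;
// opaque-fd payloads may also be imported permanently.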
  37204. #ifdef VK_USE_PLATFORM_WIN32_KHR
  37205. struct ImportSemaphoreWin32HandleInfoKHR
  37206. {
  37207. static const bool allowDuplicate = false;
  37208. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
  37209. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  37210. VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
  37211. : semaphore( semaphore_ ), flags( flags_ ), handleType( handleType_ ), handle( handle_ ), name( name_ )
  37212. {}
  37213. VULKAN_HPP_CONSTEXPR ImportSemaphoreWin32HandleInfoKHR( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  37214. ImportSemaphoreWin32HandleInfoKHR( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  37215. : ImportSemaphoreWin32HandleInfoKHR( *reinterpret_cast<ImportSemaphoreWin32HandleInfoKHR const *>( &rhs ) )
  37216. {}
  37217. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  37218. VULKAN_HPP_CONSTEXPR_14 ImportSemaphoreWin32HandleInfoKHR & operator=( ImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  37219. ImportSemaphoreWin32HandleInfoKHR & operator=( VkImportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  37220. {
  37221. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR const *>( &rhs );
  37222. return *this;
  37223. }
  37224. ImportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  37225. {
  37226. pNext = pNext_;
  37227. return *this;
  37228. }
  37229. ImportSemaphoreWin32HandleInfoKHR & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
  37230. {
  37231. semaphore = semaphore_;
  37232. return *this;
  37233. }
  37234. ImportSemaphoreWin32HandleInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags_ ) VULKAN_HPP_NOEXCEPT
  37235. {
  37236. flags = flags_;
  37237. return *this;
  37238. }
  37239. ImportSemaphoreWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
  37240. {
  37241. handleType = handleType_;
  37242. return *this;
  37243. }
  37244. ImportSemaphoreWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
  37245. {
  37246. handle = handle_;
  37247. return *this;
  37248. }
  37249. ImportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
  37250. {
  37251. name = name_;
  37252. return *this;
  37253. }
  37254. operator VkImportSemaphoreWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  37255. {
  37256. return *reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR*>( this );
  37257. }
  37258. operator VkImportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
  37259. {
  37260. return *reinterpret_cast<VkImportSemaphoreWin32HandleInfoKHR*>( this );
  37261. }
  37262. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  37263. auto operator<=>( ImportSemaphoreWin32HandleInfoKHR const& ) const = default;
  37264. #else
  37265. bool operator==( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  37266. {
  37267. return ( sType == rhs.sType )
  37268. && ( pNext == rhs.pNext )
  37269. && ( semaphore == rhs.semaphore )
  37270. && ( flags == rhs.flags )
  37271. && ( handleType == rhs.handleType )
  37272. && ( handle == rhs.handle )
  37273. && ( name == rhs.name );
  37274. }
  37275. bool operator!=( ImportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  37276. {
  37277. return !operator==( rhs );
  37278. }
  37279. #endif
  37280. public:
  37281. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportSemaphoreWin32HandleInfoKHR;
  37282. const void* pNext = {};
  37283. VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
  37284. VULKAN_HPP_NAMESPACE::SemaphoreImportFlags flags = {};
  37285. VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
  37286. HANDLE handle = {};
  37287. LPCWSTR name = {};
  37288. };
  37289. static_assert( sizeof( ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
  37290. static_assert( std::is_standard_layout<ImportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
  37291. template <>
  37292. struct CppType<StructureType, StructureType::eImportSemaphoreWin32HandleInfoKHR>
  37293. {
  37294. using Type = ImportSemaphoreWin32HandleInfoKHR;
  37295. };
  37296. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  37297. struct InitializePerformanceApiInfoINTEL
  37298. {
  37299. static const bool allowDuplicate = false;
  37300. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInitializePerformanceApiInfoINTEL;
  37301. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  37302. VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL(void* pUserData_ = {}) VULKAN_HPP_NOEXCEPT
  37303. : pUserData( pUserData_ )
  37304. {}
  37305. VULKAN_HPP_CONSTEXPR InitializePerformanceApiInfoINTEL( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  37306. InitializePerformanceApiInfoINTEL( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
  37307. : InitializePerformanceApiInfoINTEL( *reinterpret_cast<InitializePerformanceApiInfoINTEL const *>( &rhs ) )
  37308. {}
  37309. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  37310. VULKAN_HPP_CONSTEXPR_14 InitializePerformanceApiInfoINTEL & operator=( InitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  37311. InitializePerformanceApiInfoINTEL & operator=( VkInitializePerformanceApiInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
  37312. {
  37313. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL const *>( &rhs );
  37314. return *this;
  37315. }
  37316. InitializePerformanceApiInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  37317. {
  37318. pNext = pNext_;
  37319. return *this;
  37320. }
  37321. InitializePerformanceApiInfoINTEL & setPUserData( void* pUserData_ ) VULKAN_HPP_NOEXCEPT
  37322. {
  37323. pUserData = pUserData_;
  37324. return *this;
  37325. }
  37326. operator VkInitializePerformanceApiInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
  37327. {
  37328. return *reinterpret_cast<const VkInitializePerformanceApiInfoINTEL*>( this );
  37329. }
  37330. operator VkInitializePerformanceApiInfoINTEL &() VULKAN_HPP_NOEXCEPT
  37331. {
  37332. return *reinterpret_cast<VkInitializePerformanceApiInfoINTEL*>( this );
  37333. }
  37334. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  37335. auto operator<=>( InitializePerformanceApiInfoINTEL const& ) const = default;
  37336. #else
  37337. bool operator==( InitializePerformanceApiInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
  37338. {
  37339. return ( sType == rhs.sType )
  37340. && ( pNext == rhs.pNext )
  37341. && ( pUserData == rhs.pUserData );
  37342. }
  37343. bool operator!=( InitializePerformanceApiInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
  37344. {
  37345. return !operator==( rhs );
  37346. }
  37347. #endif
  37348. public:
  37349. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInitializePerformanceApiInfoINTEL;
  37350. const void* pNext = {};
  37351. void* pUserData = {};
  37352. };
  37353. static_assert( sizeof( InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ), "struct and wrapper have different size!" );
  37354. static_assert( std::is_standard_layout<InitializePerformanceApiInfoINTEL>::value, "struct wrapper is not a standard layout!" );
  37355. template <>
  37356. struct CppType<StructureType, StructureType::eInitializePerformanceApiInfoINTEL>
  37357. {
  37358. using Type = InitializePerformanceApiInfoINTEL;
  37359. };
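// Illustrative sketch only: with VK_INTEL_performance_query, the performance API is typically
// initialized once per device before acquiring configurations. Assumes a valid `device`:
//
//   vk::InitializePerformanceApiInfoINTEL initInfo( nullptr /* pUserData */ );
//   device.initializePerformanceApiINTEL( initInfo );
//   // ... acquire configurations, record performance markers ...
//   device.uninitializePerformanceApiINTEL();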
  37360. struct DisplayEventInfoEXT
  37361. {
  37362. static const bool allowDuplicate = false;
  37363. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayEventInfoEXT;
  37364. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  37365. VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT(VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut) VULKAN_HPP_NOEXCEPT
  37366. : displayEvent( displayEvent_ )
  37367. {}
  37368. VULKAN_HPP_CONSTEXPR DisplayEventInfoEXT( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  37369. DisplayEventInfoEXT( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  37370. : DisplayEventInfoEXT( *reinterpret_cast<DisplayEventInfoEXT const *>( &rhs ) )
  37371. {}
  37372. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  37373. VULKAN_HPP_CONSTEXPR_14 DisplayEventInfoEXT & operator=( DisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  37374. DisplayEventInfoEXT & operator=( VkDisplayEventInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  37375. {
  37376. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT const *>( &rhs );
  37377. return *this;
  37378. }
  37379. DisplayEventInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  37380. {
  37381. pNext = pNext_;
  37382. return *this;
  37383. }
  37384. DisplayEventInfoEXT & setDisplayEvent( VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent_ ) VULKAN_HPP_NOEXCEPT
  37385. {
  37386. displayEvent = displayEvent_;
  37387. return *this;
  37388. }
  37389. operator VkDisplayEventInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  37390. {
  37391. return *reinterpret_cast<const VkDisplayEventInfoEXT*>( this );
  37392. }
  37393. operator VkDisplayEventInfoEXT &() VULKAN_HPP_NOEXCEPT
  37394. {
  37395. return *reinterpret_cast<VkDisplayEventInfoEXT*>( this );
  37396. }
  37397. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  37398. auto operator<=>( DisplayEventInfoEXT const& ) const = default;
  37399. #else
  37400. bool operator==( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  37401. {
  37402. return ( sType == rhs.sType )
  37403. && ( pNext == rhs.pNext )
  37404. && ( displayEvent == rhs.displayEvent );
  37405. }
  37406. bool operator!=( DisplayEventInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  37407. {
  37408. return !operator==( rhs );
  37409. }
  37410. #endif
  37411. public:
  37412. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayEventInfoEXT;
  37413. const void* pNext = {};
  37414. VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT displayEvent = VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT::eFirstPixelOut;
  37415. };
  37416. static_assert( sizeof( DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" );
  37417. static_assert( std::is_standard_layout<DisplayEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
  37418. template <>
  37419. struct CppType<StructureType, StructureType::eDisplayEventInfoEXT>
  37420. {
  37421. using Type = DisplayEventInfoEXT;
  37422. };
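// Illustrative sketch only: VK_EXT_display_control lets an application create a fence that signals
// on a display event. Assumes a valid `device` and a vk::DisplayKHR `display` obtained through the
// display enumeration path:
//
//   vk::DisplayEventInfoEXT eventInfo( vk::DisplayEventTypeEXT::eFirstPixelOut );
//   vk::Fence fence = device.registerDisplayEventEXT( display, eventInfo );
//   // wait on `fence` to learn when the first pixel has left the display engine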
  37423. struct XYColorEXT
  37424. {
  37425. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  37426. VULKAN_HPP_CONSTEXPR XYColorEXT(float x_ = {}, float y_ = {}) VULKAN_HPP_NOEXCEPT
  37427. : x( x_ ), y( y_ )
  37428. {}
  37429. VULKAN_HPP_CONSTEXPR XYColorEXT( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  37430. XYColorEXT( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  37431. : XYColorEXT( *reinterpret_cast<XYColorEXT const *>( &rhs ) )
  37432. {}
  37433. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  37434. VULKAN_HPP_CONSTEXPR_14 XYColorEXT & operator=( XYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  37435. XYColorEXT & operator=( VkXYColorEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  37436. {
  37437. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XYColorEXT const *>( &rhs );
  37438. return *this;
  37439. }
  37440. XYColorEXT & setX( float x_ ) VULKAN_HPP_NOEXCEPT
  37441. {
  37442. x = x_;
  37443. return *this;
  37444. }
  37445. XYColorEXT & setY( float y_ ) VULKAN_HPP_NOEXCEPT
  37446. {
  37447. y = y_;
  37448. return *this;
  37449. }
  37450. operator VkXYColorEXT const&() const VULKAN_HPP_NOEXCEPT
  37451. {
  37452. return *reinterpret_cast<const VkXYColorEXT*>( this );
  37453. }
  37454. operator VkXYColorEXT &() VULKAN_HPP_NOEXCEPT
  37455. {
  37456. return *reinterpret_cast<VkXYColorEXT*>( this );
  37457. }
  37458. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  37459. auto operator<=>( XYColorEXT const& ) const = default;
  37460. #else
  37461. bool operator==( XYColorEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  37462. {
  37463. return ( x == rhs.x )
  37464. && ( y == rhs.y );
  37465. }
  37466. bool operator!=( XYColorEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  37467. {
  37468. return !operator==( rhs );
  37469. }
  37470. #endif
  37471. public:
  37472. float x = {};
  37473. float y = {};
  37474. };
  37475. static_assert( sizeof( XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" );
  37476. static_assert( std::is_standard_layout<XYColorEXT>::value, "struct wrapper is not a standard layout!" );
  37477. struct HdrMetadataEXT
  37478. {
  37479. static const bool allowDuplicate = false;
  37480. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHdrMetadataEXT;
  37481. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  37482. VULKAN_HPP_CONSTEXPR HdrMetadataEXT(VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue_ = {}, VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint_ = {}, float maxLuminance_ = {}, float minLuminance_ = {}, float maxContentLightLevel_ = {}, float maxFrameAverageLightLevel_ = {}) VULKAN_HPP_NOEXCEPT
  37483. : displayPrimaryRed( displayPrimaryRed_ ), displayPrimaryGreen( displayPrimaryGreen_ ), displayPrimaryBlue( displayPrimaryBlue_ ), whitePoint( whitePoint_ ), maxLuminance( maxLuminance_ ), minLuminance( minLuminance_ ), maxContentLightLevel( maxContentLightLevel_ ), maxFrameAverageLightLevel( maxFrameAverageLightLevel_ )
  37484. {}
  37485. VULKAN_HPP_CONSTEXPR HdrMetadataEXT( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  37486. HdrMetadataEXT( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  37487. : HdrMetadataEXT( *reinterpret_cast<HdrMetadataEXT const *>( &rhs ) )
  37488. {}
  37489. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  37490. VULKAN_HPP_CONSTEXPR_14 HdrMetadataEXT & operator=( HdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  37491. HdrMetadataEXT & operator=( VkHdrMetadataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  37492. {
  37493. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HdrMetadataEXT const *>( &rhs );
  37494. return *this;
  37495. }
  37496. HdrMetadataEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  37497. {
  37498. pNext = pNext_;
  37499. return *this;
  37500. }
  37501. HdrMetadataEXT & setDisplayPrimaryRed( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryRed_ ) VULKAN_HPP_NOEXCEPT
  37502. {
  37503. displayPrimaryRed = displayPrimaryRed_;
  37504. return *this;
  37505. }
  37506. HdrMetadataEXT & setDisplayPrimaryGreen( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryGreen_ ) VULKAN_HPP_NOEXCEPT
  37507. {
  37508. displayPrimaryGreen = displayPrimaryGreen_;
  37509. return *this;
  37510. }
  37511. HdrMetadataEXT & setDisplayPrimaryBlue( VULKAN_HPP_NAMESPACE::XYColorEXT const & displayPrimaryBlue_ ) VULKAN_HPP_NOEXCEPT
  37512. {
  37513. displayPrimaryBlue = displayPrimaryBlue_;
  37514. return *this;
  37515. }
  37516. HdrMetadataEXT & setWhitePoint( VULKAN_HPP_NAMESPACE::XYColorEXT const & whitePoint_ ) VULKAN_HPP_NOEXCEPT
  37517. {
  37518. whitePoint = whitePoint_;
  37519. return *this;
  37520. }
  37521. HdrMetadataEXT & setMaxLuminance( float maxLuminance_ ) VULKAN_HPP_NOEXCEPT
  37522. {
  37523. maxLuminance = maxLuminance_;
  37524. return *this;
  37525. }
  37526. HdrMetadataEXT & setMinLuminance( float minLuminance_ ) VULKAN_HPP_NOEXCEPT
  37527. {
  37528. minLuminance = minLuminance_;
  37529. return *this;
  37530. }
  37531. HdrMetadataEXT & setMaxContentLightLevel( float maxContentLightLevel_ ) VULKAN_HPP_NOEXCEPT
  37532. {
  37533. maxContentLightLevel = maxContentLightLevel_;
  37534. return *this;
  37535. }
  37536. HdrMetadataEXT & setMaxFrameAverageLightLevel( float maxFrameAverageLightLevel_ ) VULKAN_HPP_NOEXCEPT
  37537. {
  37538. maxFrameAverageLightLevel = maxFrameAverageLightLevel_;
  37539. return *this;
  37540. }
  37541. operator VkHdrMetadataEXT const&() const VULKAN_HPP_NOEXCEPT
  37542. {
  37543. return *reinterpret_cast<const VkHdrMetadataEXT*>( this );
  37544. }
  37545. operator VkHdrMetadataEXT &() VULKAN_HPP_NOEXCEPT
  37546. {
  37547. return *reinterpret_cast<VkHdrMetadataEXT*>( this );
  37548. }
  37549. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  37550. auto operator<=>( HdrMetadataEXT const& ) const = default;
  37551. #else
  37552. bool operator==( HdrMetadataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  37553. {
  37554. return ( sType == rhs.sType )
  37555. && ( pNext == rhs.pNext )
  37556. && ( displayPrimaryRed == rhs.displayPrimaryRed )
  37557. && ( displayPrimaryGreen == rhs.displayPrimaryGreen )
  37558. && ( displayPrimaryBlue == rhs.displayPrimaryBlue )
  37559. && ( whitePoint == rhs.whitePoint )
  37560. && ( maxLuminance == rhs.maxLuminance )
  37561. && ( minLuminance == rhs.minLuminance )
  37562. && ( maxContentLightLevel == rhs.maxContentLightLevel )
  37563. && ( maxFrameAverageLightLevel == rhs.maxFrameAverageLightLevel );
  37564. }
  37565. bool operator!=( HdrMetadataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  37566. {
  37567. return !operator==( rhs );
  37568. }
  37569. #endif
  37570. public:
  37571. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHdrMetadataEXT;
  37572. const void* pNext = {};
  37573. VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryRed = {};
  37574. VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryGreen = {};
  37575. VULKAN_HPP_NAMESPACE::XYColorEXT displayPrimaryBlue = {};
  37576. VULKAN_HPP_NAMESPACE::XYColorEXT whitePoint = {};
  37577. float maxLuminance = {};
  37578. float minLuminance = {};
  37579. float maxContentLightLevel = {};
  37580. float maxFrameAverageLightLevel = {};
  37581. };
  37582. static_assert( sizeof( HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" );
  37583. static_assert( std::is_standard_layout<HdrMetadataEXT>::value, "struct wrapper is not a standard layout!" );
  37584. template <>
  37585. struct CppType<StructureType, StructureType::eHdrMetadataEXT>
  37586. {
  37587. using Type = HdrMetadataEXT;
  37588. };
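// Illustrative sketch only: HDR mastering metadata is applied to one or more swapchains via
// vk::Device::setHdrMetadataEXT. The numbers below are the commonly quoted BT.2020 primaries with a
// D65 white point, given purely as placeholders; real metadata comes from the content. Assumes a
// valid `device` and `swapchain`:
//
//   vk::HdrMetadataEXT metadata;
//   metadata.setDisplayPrimaryRed( { 0.708f, 0.292f } )
//           .setDisplayPrimaryGreen( { 0.170f, 0.797f } )
//           .setDisplayPrimaryBlue( { 0.131f, 0.046f } )
//           .setWhitePoint( { 0.3127f, 0.3290f } )
//           .setMaxLuminance( 1000.0f )
//           .setMinLuminance( 0.001f )
//           .setMaxContentLightLevel( 1000.0f )
//           .setMaxFrameAverageLightLevel( 400.0f );
//   device.setHdrMetadataEXT( swapchain, metadata );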
  37589. struct SemaphoreSignalInfo
  37590. {
  37591. static const bool allowDuplicate = false;
  37592. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreSignalInfo;
  37593. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  37594. VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo(VULKAN_HPP_NAMESPACE::Semaphore semaphore_ = {}, uint64_t value_ = {}) VULKAN_HPP_NOEXCEPT
  37595. : semaphore( semaphore_ ), value( value_ )
  37596. {}
  37597. VULKAN_HPP_CONSTEXPR SemaphoreSignalInfo( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  37598. SemaphoreSignalInfo( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  37599. : SemaphoreSignalInfo( *reinterpret_cast<SemaphoreSignalInfo const *>( &rhs ) )
  37600. {}
  37601. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  37602. VULKAN_HPP_CONSTEXPR_14 SemaphoreSignalInfo & operator=( SemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  37603. SemaphoreSignalInfo & operator=( VkSemaphoreSignalInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  37604. {
  37605. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo const *>( &rhs );
  37606. return *this;
  37607. }
  37608. SemaphoreSignalInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  37609. {
  37610. pNext = pNext_;
  37611. return *this;
  37612. }
  37613. SemaphoreSignalInfo & setSemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore_ ) VULKAN_HPP_NOEXCEPT
  37614. {
  37615. semaphore = semaphore_;
  37616. return *this;
  37617. }
  37618. SemaphoreSignalInfo & setValue( uint64_t value_ ) VULKAN_HPP_NOEXCEPT
  37619. {
  37620. value = value_;
  37621. return *this;
  37622. }
  37623. operator VkSemaphoreSignalInfo const&() const VULKAN_HPP_NOEXCEPT
  37624. {
  37625. return *reinterpret_cast<const VkSemaphoreSignalInfo*>( this );
  37626. }
  37627. operator VkSemaphoreSignalInfo &() VULKAN_HPP_NOEXCEPT
  37628. {
  37629. return *reinterpret_cast<VkSemaphoreSignalInfo*>( this );
  37630. }
  37631. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  37632. auto operator<=>( SemaphoreSignalInfo const& ) const = default;
  37633. #else
  37634. bool operator==( SemaphoreSignalInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  37635. {
  37636. return ( sType == rhs.sType )
  37637. && ( pNext == rhs.pNext )
  37638. && ( semaphore == rhs.semaphore )
  37639. && ( value == rhs.value );
  37640. }
  37641. bool operator!=( SemaphoreSignalInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  37642. {
  37643. return !operator==( rhs );
  37644. }
  37645. #endif
  37646. public:
  37647. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreSignalInfo;
  37648. const void* pNext = {};
  37649. VULKAN_HPP_NAMESPACE::Semaphore semaphore = {};
  37650. uint64_t value = {};
  37651. };
  37652. static_assert( sizeof( SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ), "struct and wrapper have different size!" );
  37653. static_assert( std::is_standard_layout<SemaphoreSignalInfo>::value, "struct wrapper is not a standard layout!" );
  37654. template <>
  37655. struct CppType<StructureType, StructureType::eSemaphoreSignalInfo>
  37656. {
  37657. using Type = SemaphoreSignalInfo;
  37658. };
  37659. using SemaphoreSignalInfoKHR = SemaphoreSignalInfo;
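// Illustrative sketch only: signalling a timeline semaphore from the host (Vulkan 1.2, or
// VK_KHR_timeline_semaphore through the KHR alias above). Assumes `device` and a timeline
// `semaphore`:
//
//   vk::SemaphoreSignalInfo signalInfo( semaphore, 42 /* new counter value */ );
//   device.signalSemaphore( signalInfo );        // or device.signalSemaphoreKHR( signalInfo )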
  37660. struct SemaphoreWaitInfo
  37661. {
  37662. static const bool allowDuplicate = false;
  37663. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreWaitInfo;
  37664. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  37665. VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo(VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ = {}, uint32_t semaphoreCount_ = {}, const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ = {}, const uint64_t* pValues_ = {}) VULKAN_HPP_NOEXCEPT
  37666. : flags( flags_ ), semaphoreCount( semaphoreCount_ ), pSemaphores( pSemaphores_ ), pValues( pValues_ )
  37667. {}
  37668. VULKAN_HPP_CONSTEXPR SemaphoreWaitInfo( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  37669. SemaphoreWaitInfo( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  37670. : SemaphoreWaitInfo( *reinterpret_cast<SemaphoreWaitInfo const *>( &rhs ) )
  37671. {}
  37672. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  37673. SemaphoreWaitInfo( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ = {} )
  37674. : flags( flags_ ), semaphoreCount( static_cast<uint32_t>( semaphores_.size() ) ), pSemaphores( semaphores_.data() ), pValues( values_.data() )
  37675. {
  37676. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  37677. VULKAN_HPP_ASSERT( semaphores_.size() == values_.size() );
  37678. #else
  37679. if ( semaphores_.size() != values_.size() )
  37680. {
  37681. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::SemaphoreWaitInfo::SemaphoreWaitInfo: semaphores_.size() != values_.size()" );
  37682. }
  37683. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  37684. }
  37685. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  37686. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  37687. VULKAN_HPP_CONSTEXPR_14 SemaphoreWaitInfo & operator=( SemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  37688. SemaphoreWaitInfo & operator=( VkSemaphoreWaitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  37689. {
  37690. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo const *>( &rhs );
  37691. return *this;
  37692. }
  37693. SemaphoreWaitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  37694. {
  37695. pNext = pNext_;
  37696. return *this;
  37697. }
  37698. SemaphoreWaitInfo & setFlags( VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags_ ) VULKAN_HPP_NOEXCEPT
  37699. {
  37700. flags = flags_;
  37701. return *this;
  37702. }
  37703. SemaphoreWaitInfo & setSemaphoreCount( uint32_t semaphoreCount_ ) VULKAN_HPP_NOEXCEPT
  37704. {
  37705. semaphoreCount = semaphoreCount_;
  37706. return *this;
  37707. }
  37708. SemaphoreWaitInfo & setPSemaphores( const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores_ ) VULKAN_HPP_NOEXCEPT
  37709. {
  37710. pSemaphores = pSemaphores_;
  37711. return *this;
  37712. }
  37713. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  37714. SemaphoreWaitInfo & setSemaphores( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Semaphore> const & semaphores_ ) VULKAN_HPP_NOEXCEPT
  37715. {
  37716. semaphoreCount = static_cast<uint32_t>( semaphores_.size() );
  37717. pSemaphores = semaphores_.data();
  37718. return *this;
  37719. }
  37720. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  37721. SemaphoreWaitInfo & setPValues( const uint64_t* pValues_ ) VULKAN_HPP_NOEXCEPT
  37722. {
  37723. pValues = pValues_;
  37724. return *this;
  37725. }
  37726. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  37727. SemaphoreWaitInfo & setValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & values_ ) VULKAN_HPP_NOEXCEPT
  37728. {
  37729. semaphoreCount = static_cast<uint32_t>( values_.size() );
  37730. pValues = values_.data();
  37731. return *this;
  37732. }
  37733. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  37734. operator VkSemaphoreWaitInfo const&() const VULKAN_HPP_NOEXCEPT
  37735. {
  37736. return *reinterpret_cast<const VkSemaphoreWaitInfo*>( this );
  37737. }
  37738. operator VkSemaphoreWaitInfo &() VULKAN_HPP_NOEXCEPT
  37739. {
  37740. return *reinterpret_cast<VkSemaphoreWaitInfo*>( this );
  37741. }
  37742. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  37743. auto operator<=>( SemaphoreWaitInfo const& ) const = default;
  37744. #else
  37745. bool operator==( SemaphoreWaitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  37746. {
  37747. return ( sType == rhs.sType )
  37748. && ( pNext == rhs.pNext )
  37749. && ( flags == rhs.flags )
  37750. && ( semaphoreCount == rhs.semaphoreCount )
  37751. && ( pSemaphores == rhs.pSemaphores )
  37752. && ( pValues == rhs.pValues );
  37753. }
  37754. bool operator!=( SemaphoreWaitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  37755. {
  37756. return !operator==( rhs );
  37757. }
  37758. #endif
  37759. public:
  37760. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreWaitInfo;
  37761. const void* pNext = {};
  37762. VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags flags = {};
  37763. uint32_t semaphoreCount = {};
  37764. const VULKAN_HPP_NAMESPACE::Semaphore* pSemaphores = {};
  37765. const uint64_t* pValues = {};
  37766. };
  37767. static_assert( sizeof( SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), "struct and wrapper have different size!" );
  37768. static_assert( std::is_standard_layout<SemaphoreWaitInfo>::value, "struct wrapper is not a standard layout!" );
  37769. template <>
  37770. struct CppType<StructureType, StructureType::eSemaphoreWaitInfo>
  37771. {
  37772. using Type = SemaphoreWaitInfo;
  37773. };
  37774. using SemaphoreWaitInfoKHR = SemaphoreWaitInfo;
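// Illustrative sketch only: waiting on the host until a timeline semaphore reaches a value. The
// ArrayProxy constructor above keeps the semaphore and value counts in sync. Assumes `device` and a
// timeline `semaphore` (both lvalues):
//
//   uint64_t waitValue = 42;
//   vk::SemaphoreWaitInfo waitInfo( {}, semaphore, waitValue );
//   vk::Result result = device.waitSemaphores( waitInfo, 1000000000 );   // 1 s timeout, in ns
//   // result is vk::Result::eSuccess once the counter reaches 42, vk::Result::eTimeout otherwise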
  37775. #ifndef VULKAN_HPP_NO_SMART_HANDLE
  37776. class Device;
  37777. template <typename Dispatch> class UniqueHandleTraits<AccelerationStructureKHR, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37778. using UniqueAccelerationStructureKHR = UniqueHandle<AccelerationStructureKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37779. template <typename Dispatch> class UniqueHandleTraits<AccelerationStructureNV, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37780. using UniqueAccelerationStructureNV = UniqueHandle<AccelerationStructureNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37781. template <typename Dispatch> class UniqueHandleTraits<Buffer, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37782. using UniqueBuffer = UniqueHandle<Buffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37783. template <typename Dispatch> class UniqueHandleTraits<BufferView, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37784. using UniqueBufferView = UniqueHandle<BufferView, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37785. template <typename Dispatch> class UniqueHandleTraits<CommandBuffer, Dispatch> { public: using deleter = PoolFree<Device, CommandPool, Dispatch>; };
  37786. using UniqueCommandBuffer = UniqueHandle<CommandBuffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37787. template <typename Dispatch> class UniqueHandleTraits<CommandPool, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37788. using UniqueCommandPool = UniqueHandle<CommandPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37789. template <typename Dispatch> class UniqueHandleTraits<DeferredOperationKHR, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37790. using UniqueDeferredOperationKHR = UniqueHandle<DeferredOperationKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37791. template <typename Dispatch> class UniqueHandleTraits<DescriptorPool, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37792. using UniqueDescriptorPool = UniqueHandle<DescriptorPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37793. template <typename Dispatch> class UniqueHandleTraits<DescriptorSet, Dispatch> { public: using deleter = PoolFree<Device, DescriptorPool, Dispatch>; };
  37794. using UniqueDescriptorSet = UniqueHandle<DescriptorSet, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37795. template <typename Dispatch> class UniqueHandleTraits<DescriptorSetLayout, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37796. using UniqueDescriptorSetLayout = UniqueHandle<DescriptorSetLayout, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37797. template <typename Dispatch> class UniqueHandleTraits<DescriptorUpdateTemplate, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37798. using UniqueDescriptorUpdateTemplate = UniqueHandle<DescriptorUpdateTemplate, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37799. using UniqueDescriptorUpdateTemplateKHR = UniqueHandle<DescriptorUpdateTemplate, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37800. template <typename Dispatch> class UniqueHandleTraits<DeviceMemory, Dispatch> { public: using deleter = ObjectFree<Device, Dispatch>; };
  37801. using UniqueDeviceMemory = UniqueHandle<DeviceMemory, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37802. template <typename Dispatch> class UniqueHandleTraits<Event, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37803. using UniqueEvent = UniqueHandle<Event, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37804. template <typename Dispatch> class UniqueHandleTraits<Fence, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37805. using UniqueFence = UniqueHandle<Fence, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37806. template <typename Dispatch> class UniqueHandleTraits<Framebuffer, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37807. using UniqueFramebuffer = UniqueHandle<Framebuffer, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37808. template <typename Dispatch> class UniqueHandleTraits<Image, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37809. using UniqueImage = UniqueHandle<Image, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37810. template <typename Dispatch> class UniqueHandleTraits<ImageView, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37811. using UniqueImageView = UniqueHandle<ImageView, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37812. template <typename Dispatch> class UniqueHandleTraits<IndirectCommandsLayoutNV, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37813. using UniqueIndirectCommandsLayoutNV = UniqueHandle<IndirectCommandsLayoutNV, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37814. template <typename Dispatch> class UniqueHandleTraits<Pipeline, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37815. using UniquePipeline = UniqueHandle<Pipeline, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37816. template <typename Dispatch> class UniqueHandleTraits<PipelineCache, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37817. using UniquePipelineCache = UniqueHandle<PipelineCache, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37818. template <typename Dispatch> class UniqueHandleTraits<PipelineLayout, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37819. using UniquePipelineLayout = UniqueHandle<PipelineLayout, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37820. template <typename Dispatch> class UniqueHandleTraits<PrivateDataSlotEXT, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37821. using UniquePrivateDataSlotEXT = UniqueHandle<PrivateDataSlotEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37822. template <typename Dispatch> class UniqueHandleTraits<QueryPool, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37823. using UniqueQueryPool = UniqueHandle<QueryPool, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37824. template <typename Dispatch> class UniqueHandleTraits<RenderPass, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37825. using UniqueRenderPass = UniqueHandle<RenderPass, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37826. template <typename Dispatch> class UniqueHandleTraits<Sampler, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37827. using UniqueSampler = UniqueHandle<Sampler, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37828. template <typename Dispatch> class UniqueHandleTraits<SamplerYcbcrConversion, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37829. using UniqueSamplerYcbcrConversion = UniqueHandle<SamplerYcbcrConversion, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37830. using UniqueSamplerYcbcrConversionKHR = UniqueHandle<SamplerYcbcrConversion, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37831. template <typename Dispatch> class UniqueHandleTraits<Semaphore, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37832. using UniqueSemaphore = UniqueHandle<Semaphore, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37833. template <typename Dispatch> class UniqueHandleTraits<ShaderModule, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37834. using UniqueShaderModule = UniqueHandle<ShaderModule, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37835. template <typename Dispatch> class UniqueHandleTraits<SwapchainKHR, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37836. using UniqueSwapchainKHR = UniqueHandle<SwapchainKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  37837. template <typename Dispatch> class UniqueHandleTraits<ValidationCacheEXT, Dispatch> { public: using deleter = ObjectDestroy<Device, Dispatch>; };
  37838. using UniqueValidationCacheEXT = UniqueHandle<ValidationCacheEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
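// Illustrative sketch only: each UniqueXxx alias above couples the handle with its deleter, so
// device-level objects are destroyed (or freed back to their pool) automatically when the unique
// handle leaves scope. Assuming `device` and a filled-in `bufferCreateInfo`:
//
//   {
//     vk::UniqueBuffer buffer = device.createBufferUnique( bufferCreateInfo );
//     // ... use buffer.get() or *buffer ...
//   }   // ~UniqueHandle calls device.destroyBuffer automatically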
  37839. #endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  37840. class Device
  37841. {
  37842. public:
  37843. using CType = VkDevice;
  37844. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDevice;
  37845. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice;
  37846. public:
  37847. VULKAN_HPP_CONSTEXPR Device() VULKAN_HPP_NOEXCEPT
  37848. : m_device(VK_NULL_HANDLE)
  37849. {}
  37850. VULKAN_HPP_CONSTEXPR Device( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  37851. : m_device(VK_NULL_HANDLE)
  37852. {}
  37853. VULKAN_HPP_TYPESAFE_EXPLICIT Device( VkDevice device ) VULKAN_HPP_NOEXCEPT
  37854. : m_device( device )
  37855. {}
  37856. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  37857. Device & operator=(VkDevice device) VULKAN_HPP_NOEXCEPT
  37858. {
  37859. m_device = device;
  37860. return *this;
  37861. }
  37862. #endif
  37863. Device & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  37864. {
  37865. m_device = VK_NULL_HANDLE;
  37866. return *this;
  37867. }
  37868. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  37869. auto operator<=>( Device const& ) const = default;
  37870. #else
  37871. bool operator==( Device const & rhs ) const VULKAN_HPP_NOEXCEPT
  37872. {
  37873. return m_device == rhs.m_device;
  37874. }
37875. bool operator!=( Device const & rhs ) const VULKAN_HPP_NOEXCEPT
37876. {
37877. return m_device != rhs.m_device;
37878. }
37879. bool operator<( Device const & rhs ) const VULKAN_HPP_NOEXCEPT
  37880. {
  37881. return m_device < rhs.m_device;
  37882. }
  37883. #endif
  37884. #ifdef VK_USE_PLATFORM_WIN32_KHR
  37885. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  37886. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37887. VULKAN_HPP_NODISCARD Result acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  37888. #else
  37889. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37890. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37891. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  37892. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  37893. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37894. VULKAN_HPP_NODISCARD Result acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  37895. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  37896. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37897. VULKAN_HPP_NODISCARD ResultValue<uint32_t> acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37898. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  37899. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37900. VULKAN_HPP_NODISCARD Result acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  37901. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  37902. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37903. VULKAN_HPP_NODISCARD ResultValue<uint32_t> acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37904. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  37905. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37906. VULKAN_HPP_NODISCARD Result acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  37907. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  37908. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37909. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37910. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  37911. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37912. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type acquirePerformanceConfigurationINTELUnique( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37913. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  37914. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  37915. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37916. VULKAN_HPP_NODISCARD Result acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  37917. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  37918. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37919. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37920. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  37921. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37922. VULKAN_HPP_NODISCARD Result allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  37923. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  37924. template <typename CommandBufferAllocator = std::allocator<CommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37925. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37926. template <typename CommandBufferAllocator = std::allocator<CommandBuffer>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = CommandBufferAllocator, typename std::enable_if<std::is_same<typename B::value_type, CommandBuffer>::value, int>::type = 0>
  37927. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37928. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  37929. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename CommandBufferAllocator = std::allocator<UniqueHandle<CommandBuffer, Dispatch>>>
  37930. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37931. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename CommandBufferAllocator = std::allocator<UniqueHandle<CommandBuffer, Dispatch>>, typename B = CommandBufferAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value, int>::type = 0>
  37932. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37933. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  37934. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
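// Illustrative sketch only (assumes `device` and `commandPool`): the enhanced overloads above
// return a std::vector sized from the allocate-info, and the *Unique variants free the buffers back
// to the pool on destruction:
//
//   vk::CommandBufferAllocateInfo allocInfo( commandPool, vk::CommandBufferLevel::ePrimary, 3 );
//   auto commandBuffers = device.allocateCommandBuffersUnique( allocInfo );
//   // commandBuffers is a std::vector<vk::UniqueCommandBuffer> of size 3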
  37935. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37936. VULKAN_HPP_NODISCARD Result allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  37937. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  37938. template <typename DescriptorSetAllocator = std::allocator<DescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37939. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37940. template <typename DescriptorSetAllocator = std::allocator<DescriptorSet>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = DescriptorSetAllocator, typename std::enable_if<std::is_same<typename B::value_type, DescriptorSet>::value, int>::type = 0>
  37941. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37942. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  37943. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename DescriptorSetAllocator = std::allocator<UniqueHandle<DescriptorSet, Dispatch>>>
  37944. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37945. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename DescriptorSetAllocator = std::allocator<UniqueHandle<DescriptorSet, Dispatch>>, typename B = DescriptorSetAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value, int>::type = 0>
  37946. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37947. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  37948. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
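// Usage sketch (illustrative only). 'descriptorPool' and 'setLayout' are assumed to be existing
// vk::DescriptorPool / vk::DescriptorSetLayout handles owned by the same 'device':
//
//   vk::DescriptorSetAllocateInfo allocInfo;
//   allocInfo.descriptorPool     = descriptorPool;
//   allocInfo.descriptorSetCount = 1;
//   allocInfo.pSetLayouts        = &setLayout;
//   std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );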
  37949. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37950. VULKAN_HPP_NODISCARD Result allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  37951. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  37952. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37953. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37954. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  37955. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37956. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37957. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  37958. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
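// Usage sketch (illustrative only). 'size' and 'memoryTypeIndex' are assumed to come from a
// prior getBufferMemoryRequirements query and the physical device's memory properties:
//
//   vk::MemoryAllocateInfo memoryInfo( size, memoryTypeIndex );
//   vk::DeviceMemory memory = device.allocateMemory( memoryInfo );
//   // or, RAII-managed (freed automatically when the handle goes out of scope):
//   vk::UniqueDeviceMemory uniqueMemory = device.allocateMemoryUnique( memoryInfo );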
  37959. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37960. VULKAN_HPP_NODISCARD Result bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  37961. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  37962. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37963. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindAccelerationStructureMemoryNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37964. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  37965. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  37966. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37967. VULKAN_HPP_NODISCARD Result bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  37968. #else
  37969. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37970. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37971. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
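// Usage sketch (illustrative only). With exceptions enabled the enhanced overload returns void
// and throws a vk::SystemError on failure; 'buffer' and 'memory' are assumed existing handles:
//
//   device.bindBufferMemory( buffer, memory, 0 );   // bind at offset 0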
  37972. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37973. VULKAN_HPP_NODISCARD Result bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  37974. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  37975. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37976. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37977. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  37978. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37979. VULKAN_HPP_NODISCARD Result bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  37980. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  37981. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37982. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37983. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  37984. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  37985. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37986. VULKAN_HPP_NODISCARD Result bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  37987. #else
  37988. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37989. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37990. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  37991. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37992. VULKAN_HPP_NODISCARD Result bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  37993. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  37994. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37995. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  37996. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
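// Usage sketch (illustrative only). The ArrayProxy parameter accepts a single structure, a
// std::vector, or an initializer list, so several images can be bound in one call:
//
//   vk::BindImageMemoryInfo bindInfo( image, memory, 0 );
//   device.bindImageMemory2( bindInfo );
//   // or: device.bindImageMemory2( { bindInfoA, bindInfoB } );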
  37997. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  37998. VULKAN_HPP_NODISCARD Result bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  37999. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38000. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38001. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38002. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38003. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38004. VULKAN_HPP_NODISCARD Result buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR* const * ppBuildRangeInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38005. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38006. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38007. Result buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR* const > const & pBuildRangeInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
  38008. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38009. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38010. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38011. VULKAN_HPP_NODISCARD Result compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38012. #else
  38013. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38014. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38015. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38016. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38017. VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38018. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38019. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38020. VULKAN_HPP_NODISCARD Result copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const CopyAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38021. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38022. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38023. VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38024. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38025. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38026. VULKAN_HPP_NODISCARD Result copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38027. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38028. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38029. VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38030. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38031. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38032. VULKAN_HPP_NODISCARD Result copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38033. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38034. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38035. VULKAN_HPP_NODISCARD Result createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructure, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38036. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38037. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38038. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38039. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38040. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38041. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38042. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38043. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38044. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38045. VULKAN_HPP_NODISCARD Result createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38046. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38047. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38048. typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38049. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38050. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38051. VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38052. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38053. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38054. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38055. VULKAN_HPP_NODISCARD Result createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38056. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38057. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38058. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38059. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38060. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38061. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38062. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38063. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
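// Usage sketch (illustrative only). A 64 KiB vertex buffer with the default exclusive sharing
// mode; the size and usage values are placeholders:
//
//   vk::BufferCreateInfo bufferInfo( {}, 65536, vk::BufferUsageFlagBits::eVertexBuffer );
//   vk::Buffer buffer = device.createBuffer( bufferInfo );
//   // RAII variant, destroyed automatically:
//   vk::UniqueBuffer uniqueBuffer = device.createBufferUnique( bufferInfo );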
  38064. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38065. VULKAN_HPP_NODISCARD Result createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38066. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38067. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38068. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38069. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38070. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38071. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38072. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38073. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38074. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38075. VULKAN_HPP_NODISCARD Result createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38076. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38077. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38078. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38079. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38080. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38081. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38082. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38083. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
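// Usage sketch (illustrative only). 'graphicsQueueFamilyIndex' is assumed to have been selected
// when the device was created:
//
//   vk::CommandPoolCreateInfo poolInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer,
//                                       graphicsQueueFamilyIndex );
//   vk::UniqueCommandPool commandPool = device.createCommandPoolUnique( poolInfo );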
  38084. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38085. VULKAN_HPP_NODISCARD Result createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38086. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38087. template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38088. VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38089. template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
  38090. VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38091. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38092. VULKAN_HPP_NODISCARD ResultValue<Pipeline> createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38093. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38094. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
  38095. VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38096. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
  38097. VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38098. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38099. VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38100. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38101. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
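// Usage sketch (illustrative only). Note that, unlike most create*() wrappers above, the pipeline
// creators return ResultValue<...> even with exceptions enabled, because pipeline creation can
// complete with a success code other than eSuccess; check .result before using .value.
// 'shaderStageInfo', 'pipelineLayout' and 'pipelineCache' are assumed to exist already:
//
//   vk::ComputePipelineCreateInfo pipelineInfo( {}, shaderStageInfo, pipelineLayout );
//   auto rv = device.createComputePipeline( pipelineCache, pipelineInfo );
//   if ( rv.result == vk::Result::eSuccess )
//   {
//     vk::Pipeline computePipeline = rv.value;
//   }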
  38102. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38103. VULKAN_HPP_NODISCARD Result createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeferredOperationKHR* pDeferredOperation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38104. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38105. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38106. typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type createDeferredOperationKHR( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38107. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38108. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38109. VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type createDeferredOperationKHRUnique( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38110. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38111. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38112. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38113. VULKAN_HPP_NODISCARD Result createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38114. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38115. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38116. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38117. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38118. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38119. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38120. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38121. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38122. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38123. VULKAN_HPP_NODISCARD Result createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38124. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38125. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38126. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38127. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38128. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38129. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38130. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38131. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38132. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38133. VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38134. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38135. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38136. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38137. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38138. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38139. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38140. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38141. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38142. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38143. VULKAN_HPP_NODISCARD Result createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38144. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38145. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38146. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38147. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38148. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38149. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38150. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38151. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38152. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38153. VULKAN_HPP_NODISCARD Result createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38154. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38155. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38156. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38157. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38158. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38159. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38160. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38161. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38162. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38163. VULKAN_HPP_NODISCARD Result createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38164. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38165. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38166. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38167. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38168. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38169. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38170. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38171. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
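// Usage sketch (illustrative only). A fence created in the signaled state, as commonly used for
// per-frame synchronization:
//
//   vk::Fence inFlightFence =
//       device.createFence( vk::FenceCreateInfo( vk::FenceCreateFlagBits::eSignaled ) );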
  38172. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38173. VULKAN_HPP_NODISCARD Result createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38174. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38175. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38176. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38177. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38178. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38179. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38180. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38181. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38182. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38183. VULKAN_HPP_NODISCARD Result createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38184. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38185. template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38186. VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38187. template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
  38188. VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38189. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38190. VULKAN_HPP_NODISCARD ResultValue<Pipeline> createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38191. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38192. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
  38193. VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38194. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
  38195. VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38196. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38197. VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38198. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38199. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38200. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38201. VULKAN_HPP_NODISCARD Result createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38202. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38203. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38204. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38205. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38206. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38207. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38208. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38209. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38210. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38211. VULKAN_HPP_NODISCARD Result createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38212. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38213. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38214. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38215. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38216. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38217. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38218. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38219. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
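// Usage sketch (illustrative only). A 2D color view over the first mip level and array layer of
// an existing 'swapchainImage' with format 'swapchainFormat' (both assumed):
//
//   vk::ImageViewCreateInfo viewInfo(
//       {}, swapchainImage, vk::ImageViewType::e2D, swapchainFormat,
//       {},                                                 // identity component mapping
//       { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );  // color aspect, 1 mip, 1 layer
//   vk::ImageView view = device.createImageView( viewInfo );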
  38220. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38221. VULKAN_HPP_NODISCARD Result createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV* pIndirectCommandsLayout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38222. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38223. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38224. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38225. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38226. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38227. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38228. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38229. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38230. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38231. VULKAN_HPP_NODISCARD Result createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38232. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38233. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38234. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38235. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38236. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38237. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38238. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38239. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38240. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38241. VULKAN_HPP_NODISCARD Result createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38242. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38243. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38244. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38245. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38246. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38247. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38248. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38249. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38250. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38251. VULKAN_HPP_NODISCARD Result createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT* pPrivateDataSlot, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38252. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38253. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38254. typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>::type createPrivateDataSlotEXT( const PrivateDataSlotCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38255. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38256. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38257. VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT, Dispatch>>::type createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38258. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38259. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38260. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38261. VULKAN_HPP_NODISCARD Result createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38262. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38263. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38264. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38265. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38266. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38267. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38268. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38269. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38270. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38271. VULKAN_HPP_NODISCARD Result createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38272. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38273. template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38274. VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38275. template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
  38276. VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38277. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38278. VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38279. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38280. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
  38281. VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38282. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
  38283. VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38284. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38285. VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38286. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38287. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38288. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38289. VULKAN_HPP_NODISCARD Result createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38290. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38291. template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38292. VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38293. template <typename PipelineAllocator = std::allocator<Pipeline>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type = 0>
  38294. VULKAN_HPP_NODISCARD ResultValue<std::vector<Pipeline, PipelineAllocator>> createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38295. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38296. VULKAN_HPP_NODISCARD ResultValue<Pipeline> createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38297. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38298. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>>
  38299. VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38300. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename PipelineAllocator = std::allocator<UniqueHandle<Pipeline, Dispatch>>, typename B = PipelineAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type = 0>
  38301. VULKAN_HPP_NODISCARD ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38302. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38303. VULKAN_HPP_NODISCARD ResultValue<UniqueHandle<Pipeline, Dispatch>> createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38304. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38305. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
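  // Usage sketch (illustrative comment only, not generated code). Assuming a valid vk::Device
  // `device`, a vk::PipelineCache `cache` and a filled-in vk::RayTracingPipelineCreateInfoNV
  // `rtCreateInfo` (all placeholder names), the enhanced-mode overloads accept a single create
  // info through ArrayProxy and return a ResultValue, so additional success codes are visible
  // to the caller instead of being hidden:
  //
  //   vk::ResultValue<std::vector<vk::Pipeline>> rv = device.createRayTracingPipelinesNV( cache, rtCreateInfo );
  //   if ( rv.result == vk::Result::eSuccess )
  //   {
  //     std::vector<vk::Pipeline> pipelines = std::move( rv.value );
  //     // ... use the pipelines ...
  //   }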
  38306. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38307. VULKAN_HPP_NODISCARD Result createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38308. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38309. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38310. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38311. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38312. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38313. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38314. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38315. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
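  // Usage sketch (illustrative comment only). Assuming a valid vk::Device `device` and a
  // vk::RenderPassCreateInfo whose attachments and subpasses were filled in by the caller;
  // createRenderPass2 / createRenderPass2KHR below work the same way with RenderPassCreateInfo2:
  //
  //   vk::RenderPassCreateInfo createInfo;            // attachments / subpasses set elsewhere
  //   vk::RenderPass       renderPass = device.createRenderPass( createInfo );        // throws on error
  //   vk::UniqueRenderPass uniqueRP   = device.createRenderPassUnique( createInfo );  // RAII wrapper
  //   ...
  //   device.destroyRenderPass( renderPass );         // only the non-unique handle needs this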
  38316. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38317. VULKAN_HPP_NODISCARD Result createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38318. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38319. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38320. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38321. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38322. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38323. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38324. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38325. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38326. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38327. VULKAN_HPP_NODISCARD Result createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38328. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38329. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38330. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38331. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38332. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38333. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38334. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38335. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38336. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38337. VULKAN_HPP_NODISCARD Result createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38338. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38339. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38340. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38341. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38342. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38343. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38344. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38345. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
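  // Usage sketch (illustrative comment only). Assuming a valid vk::Device `device`; field values
  // are placeholders chosen by the caller:
  //
  //   vk::SamplerCreateInfo samplerInfo;
  //   samplerInfo.magFilter = vk::Filter::eLinear;
  //   samplerInfo.minFilter = vk::Filter::eLinear;
  //   vk::Sampler       sampler       = device.createSampler( samplerInfo );
  //   vk::UniqueSampler uniqueSampler = device.createSamplerUnique( samplerInfo );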
  38346. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38347. VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38348. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38349. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38350. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38351. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38352. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38353. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38354. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38355. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38356. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38357. VULKAN_HPP_NODISCARD Result createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38358. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38359. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38360. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38361. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38362. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38363. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38364. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38365. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38366. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38367. VULKAN_HPP_NODISCARD Result createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38368. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38369. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38370. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38371. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38372. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38373. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38374. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38375. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
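  // Usage sketch (illustrative comment only). Assuming a valid vk::Device `device`; a binary
  // semaphore needs no parameters, a timeline semaphore is requested through the pNext chain:
  //
  //   vk::Semaphore imageAvailable = device.createSemaphore( vk::SemaphoreCreateInfo() );
  //
  //   vk::SemaphoreTypeCreateInfo typeInfo( vk::SemaphoreType::eTimeline, 0 );
  //   vk::SemaphoreCreateInfo     createInfo;
  //   createInfo.pNext = &typeInfo;
  //   vk::UniqueSemaphore timeline = device.createSemaphoreUnique( createInfo );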
  38376. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38377. VULKAN_HPP_NODISCARD Result createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38378. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38379. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38380. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38381. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38382. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38383. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38384. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38385. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
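  // Usage sketch (illustrative comment only). Assuming a valid vk::Device `device` and SPIR-V
  // code already loaded by the caller; note that codeSize is given in bytes, not words:
  //
  //   std::vector<uint32_t> spirv;   // SPIR-V words loaded from disk or embedded in the binary
  //   vk::ShaderModuleCreateInfo createInfo( {}, spirv.size() * sizeof( uint32_t ), spirv.data() );
  //   vk::UniqueShaderModule shaderModule = device.createShaderModuleUnique( createInfo );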
  38386. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38387. VULKAN_HPP_NODISCARD Result createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38388. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38389. template <typename SwapchainKHRAllocator = std::allocator<SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38390. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38391. template <typename SwapchainKHRAllocator = std::allocator<SwapchainKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = SwapchainKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, SwapchainKHR>::value, int>::type = 0>
  38392. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38393. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38394. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<SwapchainKHR>::type createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38395. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38396. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename SwapchainKHRAllocator = std::allocator<UniqueHandle<SwapchainKHR, Dispatch>>>
  38397. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38398. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename SwapchainKHRAllocator = std::allocator<UniqueHandle<SwapchainKHR, Dispatch>>, typename B = SwapchainKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, int>::type = 0>
  38399. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38400. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38401. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38402. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38403. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
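  // Usage sketch (illustrative comment only) for VK_KHR_display_swapchain. Assuming a valid
  // vk::Device `device` and two prepared create infos `infoA` / `infoB` (placeholder names),
  // all swapchains are created in a single call:
  //
  //   std::array<vk::SwapchainCreateInfoKHR, 2> infos = { infoA, infoB };
  //   std::vector<vk::SwapchainKHR> swapchains = device.createSharedSwapchainsKHR( infos );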
  38404. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38405. VULKAN_HPP_NODISCARD Result createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38406. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38407. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38408. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38409. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38410. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38411. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38412. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38413. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
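  // Usage sketch (illustrative comment only). Assuming a valid vk::Device `device` and a
  // vk::SurfaceKHR `surface` obtained from the instance; format, extent, image count and
  // present mode are filled in by the caller:
  //
  //   vk::SwapchainCreateInfoKHR swapchainInfo;
  //   swapchainInfo.surface = surface;
  //   vk::SwapchainKHR       swapchain       = device.createSwapchainKHR( swapchainInfo );
  //   vk::UniqueSwapchainKHR uniqueSwapchain = device.createSwapchainKHRUnique( swapchainInfo );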
  38414. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38415. VULKAN_HPP_NODISCARD Result createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pValidationCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38416. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38417. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38418. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38419. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  38420. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38421. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38422. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  38423. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
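  // Usage sketch (illustrative comment only) for VK_EXT_validation_cache. Assuming a valid
  // vk::Device `device`; the create info may optionally be seeded with data saved from an
  // earlier run:
  //
  //   vk::ValidationCacheCreateInfoEXT cacheInfo;
  //   vk::ValidationCacheEXT cache = device.createValidationCacheEXT( cacheInfo );
  //   ...
  //   device.destroyValidationCacheEXT( cache );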
  38424. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38425. VULKAN_HPP_NODISCARD Result debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38426. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38427. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38428. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38429. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
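  // Usage sketch (illustrative comment only) for VK_EXT_debug_marker. Assuming a valid
  // vk::Device `device` and a vk::Buffer `vertexBuffer` (placeholder names); attaching a
  // readable name helps debuggers and capture tools identify the handle:
  //
  //   vk::DebugMarkerObjectNameInfoEXT nameInfo;
  //   nameInfo.objectType  = vk::DebugReportObjectTypeEXT::eBuffer;
  //   nameInfo.object      = uint64_t( static_cast<VkBuffer>( vertexBuffer ) );
  //   nameInfo.pObjectName = "vertex buffer";
  //   device.debugMarkerSetObjectNameEXT( nameInfo );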
  38430. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38431. VULKAN_HPP_NODISCARD Result debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38432. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38433. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38434. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38435. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38436. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38437. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38438. VULKAN_HPP_NODISCARD Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38439. #else
  38440. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38441. VULKAN_HPP_NODISCARD Result deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  38442. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
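  // Usage sketch (illustrative comment only) for VK_KHR_deferred_host_operations. Assuming a
  // valid vk::Device `device` and a vk::DeferredOperationKHR `op`; worker threads call join
  // and interpret the returned result code themselves:
  //
  //   vk::Result r = device.deferredOperationJoinKHR( op );
  //   // eSuccess       : the deferred operation is complete
  //   // eThreadDoneKHR : this thread can stop, other threads may still be working
  //   // eThreadIdleKHR : nothing to do right now, the thread may join again later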
  38443. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38444. void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38445. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38446. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38447. void destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38448. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38449. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38450. void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38451. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38452. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38453. void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38454. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
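  // Note (illustrative comment only): for every destroyXxx member there is also a destroy()
  // overload selected by the handle type, which is what generic code and the vk::UniqueHandle
  // deleters rely on. Assuming a vk::AccelerationStructureKHR `accel`, the two calls below are
  // equivalent:
  //
  //   device.destroyAccelerationStructureKHR( accel );   // explicit form
  //   device.destroy( accel );                           // overloaded form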
  38455. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38456. void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38457. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38458. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38459. void destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38460. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38461. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38462. void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38463. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38464. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38465. void destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38466. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38467. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38468. void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38469. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38470. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38471. void destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38472. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38473. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38474. void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38475. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38476. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38477. void destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38478. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38479. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38480. void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38481. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38482. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38483. void destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38484. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38485. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38486. void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38487. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38488. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38489. void destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38490. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38491. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38492. void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38493. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38494. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38495. void destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38496. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38497. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38498. void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38499. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38500. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38501. void destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38502. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38503. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38504. void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38505. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38506. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38507. void destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38508. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38509. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38510. void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38511. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38512. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38513. void destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38514. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38515. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38516. void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38517. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38518. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38519. void destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38520. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38521. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38522. void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38523. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38524. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38525. void destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38526. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38527. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38528. void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38529. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38530. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38531. void destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38532. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38533. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38534. void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38535. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38536. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38537. void destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38538. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38539. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38540. void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38541. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38542. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38543. void destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38544. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38545. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38546. void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38547. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38548. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38549. void destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38550. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38551. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38552. void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38553. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38554. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38555. void destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38556. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38557. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38558. void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38559. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38560. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38561. void destroy( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38562. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
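  // Note (illustrative comment only): the parameterless destroy() overload above destroys the
  // device itself (vkDestroyDevice). A sketch of a typical shutdown, assuming all child objects
  // have already been destroyed or are held in unique handles that went out of scope:
  //
  //   device.waitIdle();
  //   device.destroy();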
  38563. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38564. void destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38565. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38566. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38567. void destroyEvent( VULKAN_HPP_NAMESPACE::Event event VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38568. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38569. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38570. void destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38571. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38572. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38573. void destroy( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38574. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38575. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38576. void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38577. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38578. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38579. void destroyFence( VULKAN_HPP_NAMESPACE::Fence fence VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38580. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38581. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38582. void destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38583. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38584. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38585. void destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38586. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38587. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38588. void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38589. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38590. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38591. void destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38592. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38593. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38594. void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38595. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38596. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38597. void destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38598. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38599. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38600. void destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38601. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38602. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38603. void destroyImage( VULKAN_HPP_NAMESPACE::Image image VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38604. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38605. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38606. void destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38607. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38608. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38609. void destroy( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38610. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38611. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38612. void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38613. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38614. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38615. void destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38616. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38617. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38618. void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38619. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38620. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38621. void destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38622. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38623. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38624. void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38625. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38626. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38627. void destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38628. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38629. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38630. void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38631. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38632. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38633. void destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38634. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38635. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38636. void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38637. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38638. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38639. void destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38640. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38641. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38642. void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38643. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38644. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38645. void destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38646. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38647. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38648. void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38649. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38650. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38651. void destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38652. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38653. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38654. void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38655. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38656. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38657. void destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38658. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38659. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38660. void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38661. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38662. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38663. void destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38664. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38665. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38666. void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38667. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38668. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38669. void destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38670. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38671. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38672. void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38673. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38674. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38675. void destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38676. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38677. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38678. void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38679. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38680. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38681. void destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38682. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  38683. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  38684. void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  38685. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  38686. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
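// Usage sketch (illustrative only, not part of the generated declarations above): in enhanced mode
// the destroyXxx / unified destroy overloads take Optional<const AllocationCallbacks>, which
// defaults to nullptr, so the common case needs no arguments beyond the handle. Assuming a valid
// vk::Device `device` and handles `sampler`, `semaphore` and `shaderModule` created earlier:
//
//   device.destroySampler( sampler );       // default allocation callbacks, default dispatcher
//   device.destroy( semaphore );            // unified destroy(), equivalent to destroySemaphore()
//   device.destroy( shaderModule );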
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type waitIdle( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
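// Usage sketch (illustrative): with enhanced mode and exceptions enabled, waitIdle() returns void
// and throws a vk::SystemError on failure; with VULKAN_HPP_DISABLE_ENHANCED_MODE the raw
// vk::Result must be checked by the caller instead. Assuming a valid vk::Device `device`:
//
//   device.waitIdle();   // blocks until every queue of the device is idle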
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<void>::type displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
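// Usage sketch (illustrative): the enhanced overload takes an ArrayProxy, so either a single
// vk::MappedMemoryRange or a container of them can be passed without an explicit count. Assuming a
// valid vk::Device `device` and a mapped, non-host-coherent vk::DeviceMemory `memory`:
//
//   device.flushMappedMemoryRanges( vk::MappedMemoryRange( memory, 0, VK_WHOLE_SIZE ) );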
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
Result freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<void>::type freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
Result free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<void>::type free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
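// Usage sketch (illustrative): the ArrayProxy overloads take any contiguous range and derive the
// count from it. Assuming a valid vk::Device `device`, a vk::CommandPool `commandPool`, a
// vk::DescriptorPool `descriptorPool` created with eFreeDescriptorSet, and vectors
// `commandBuffers` / `descriptorSets` of the matching handle types:
//
//   device.freeCommandBuffers( commandPool, commandBuffers );
//   device.freeDescriptorSets( descriptorPool, descriptorSets );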
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pBuildInfo, const uint32_t* pMaxPrimitiveCounts, VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR* pSizeInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const AccelerationStructureBuildGeometryInfoKHR & buildInfo, ArrayProxy<const uint32_t> const & maxPrimitiveCounts VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
DeviceAddress getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
DeviceAddress getAccelerationStructureAddressKHR( const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, ArrayProxy<T> const & data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename T, typename Allocator = std::allocator<T>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T,Allocator>>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
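// Usage sketch (illustrative): the NV acceleration structure handle is an 8-byte opaque value, so
// the single-value enhanced overload is commonly instantiated with uint64_t. Assuming a valid
// vk::Device `device` and a vk::AccelerationStructureNV `accelerationStructure`:
//
//   uint64_t handle = device.getAccelerationStructureHandleNV<uint64_t>( accelerationStructure );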
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
DeviceAddress getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
DeviceAddress getBufferAddress( const BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
DeviceAddress getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
DeviceAddress getBufferAddressEXT( const BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
DeviceAddress getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
DeviceAddress getBufferAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
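// Usage sketch (illustrative): the StructureChain overload returns the base structure together
// with chained extension structures in one call, e.g. vk::MemoryDedicatedRequirements chained to
// vk::MemoryRequirements2. Assuming a valid vk::Device `device` and a vk::Buffer `buffer`:
//
//   auto chain = device.getBufferMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>(
//     vk::BufferMemoryRequirementsInfo2( buffer ) );
//   vk::MemoryRequirements reqs      = chain.get<vk::MemoryRequirements2>().memoryRequirements;
//   vk::Bool32             dedicated = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation;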
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint64_t getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint64_t getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint64_t getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint64_t getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos, ArrayProxy<uint64_t> const & timestamps, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Uint64_tAllocator = std::allocator<uint64_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename Uint64_tAllocator = std::allocator<uint64_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Uint64_tAllocator, typename std::enable_if<std::is_same<typename B::value_type, uint64_t>::value, int>::type = 0>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos, Uint64_tAllocator & uint64_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
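// Usage sketch (illustrative): the vector-returning enhanced overload yields the timestamps and
// the maximum deviation as a pair. Assuming a valid vk::Device `device` whose physical device and
// enabled extensions include VK_EXT_calibrated_timestamps:
//
//   std::array<vk::CalibratedTimestampInfoEXT, 2> infos = {
//     vk::CalibratedTimestampInfoEXT( vk::TimeDomainEXT::eDevice ),
//     vk::CalibratedTimestampInfoEXT( vk::TimeDomainEXT::eClockMonotonic ) };
//   auto result = device.getCalibratedTimestampsEXT( infos );   // std::pair<std::vector<uint64_t>, uint64_t>
//   std::vector<uint64_t> timestamps   = result.first;
//   uint64_t              maxDeviation = result.second;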
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint32_t getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR* pVersionInfo, VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR* pCompatibility, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionInfoKHR & versionInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type getGroupPresentCapabilitiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize* pCommittedMemoryInBytes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::DeviceSize getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint64_t getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint64_t getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint64_t getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint64_t getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
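// Usage sketch (illustrative): in enhanced mode the queue is returned by value rather than through
// an out-parameter. Assuming a valid vk::Device `device` and a queue family index
// `graphicsQueueFamilyIndex` that was requested at device creation:
//
//   vk::Queue graphicsQueue = device.getQueue( graphicsQueueFamilyIndex, 0 );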
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2* pQueueInfo, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Queue getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#else
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
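// Usage sketch (illustrative): getEventStatus / getFenceStatus return a vk::Result that encodes a
// state, so the value is inspected rather than treated as an error code. Assuming a valid
// vk::Device `device` and a vk::Fence `fence` submitted earlier:
//
//   if ( device.getFenceStatus( fence ) == vk::Result::eSuccess )
//   {
//     // signaled; vk::Result::eNotReady means the fence is still pending
//   }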
#ifdef VK_USE_PLATFORM_WIN32_KHR
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MemoryRequirements2 getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename SparseImageMemoryRequirementsAllocator = std::allocator<SparseImageMemoryRequirements>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename SparseImageMemoryRequirementsAllocator = std::allocator<SparseImageMemoryRequirements>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = SparseImageMemoryRequirementsAllocator, typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements>::value, int>::type = 0>
VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = SparseImageMemoryRequirements2Allocator, typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type = 0>
VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
template <typename SparseImageMemoryRequirements2Allocator = std::allocator<SparseImageMemoryRequirements2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = SparseImageMemoryRequirements2Allocator, typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type = 0>
VULKAN_HPP_NODISCARD std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
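// Usage sketch (illustrative): the enhanced overloads size and fill the result vector internally.
// Assuming a valid vk::Device `device` and a vk::Image `image` created with sparse binding flags:
//
//   std::vector<vk::SparseImageMemoryRequirements2> sparseReqs =
//     device.getImageSparseMemoryRequirements2( vk::ImageSparseMemoryRequirementsInfo2( image ) );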
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
void getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource* pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout* pLayout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::SubresourceLayout getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const ImageSubresource & subresource, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint32_t getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
uint32_t getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<struct AHardwareBuffer*>::type getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD Result getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39161. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39162. VULKAN_HPP_NODISCARD Result getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39163. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39164. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39165. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39166. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39167. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39168. VULKAN_HPP_NODISCARD Result getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39169. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39170. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39171. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39172. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39173. #ifdef VK_USE_PLATFORM_WIN32_KHR
  39174. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39175. VULKAN_HPP_NODISCARD Result getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39176. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39177. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39178. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39179. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39180. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  39181. #ifdef VK_USE_PLATFORM_WIN32_KHR
  39182. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39183. VULKAN_HPP_NODISCARD Result getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39184. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39185. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39186. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39187. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39188. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  39189. #ifdef VK_USE_PLATFORM_WIN32_KHR
  39190. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39191. VULKAN_HPP_NODISCARD Result getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39192. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39193. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39194. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39195. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39196. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  39197. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39198. VULKAN_HPP_NODISCARD Result getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39199. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39200. template <typename PastPresentationTimingGOOGLEAllocator = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39201. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39202. template <typename PastPresentationTimingGOOGLEAllocator = std::allocator<PastPresentationTimingGOOGLE>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PastPresentationTimingGOOGLEAllocator, typename std::enable_if<std::is_same<typename B::value_type, PastPresentationTimingGOOGLE>::value, int>::type = 0>
  39203. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39204. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39205. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39206. VULKAN_HPP_NODISCARD Result getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39207. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39208. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39209. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39210. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39211. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39212. VULKAN_HPP_NODISCARD Result getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39213. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39214. template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39215. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39216. template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Uint8_tAllocator, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
  39217. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39218. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
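// Illustrative usage sketch (not part of the generated header): with enhanced mode enabled,
// the overload above performs the size query / data retrieval internally and returns the
// cache blob directly, e.g. for persisting a pipeline cache between runs. Assumes a valid
// vk::Device `device` and vk::PipelineCache `pipelineCache` created elsewhere:
//
//   std::vector<uint8_t> cacheBlob = device.getPipelineCacheData( pipelineCache );
//   // write cacheBlob to disk and feed it back through PipelineCacheCreateInfo next run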
  39219. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39220. VULKAN_HPP_NODISCARD Result getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39221. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39222. template <typename PipelineExecutableInternalRepresentationKHRAllocator = std::allocator<PipelineExecutableInternalRepresentationKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39223. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39224. template <typename PipelineExecutableInternalRepresentationKHRAllocator = std::allocator<PipelineExecutableInternalRepresentationKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineExecutableInternalRepresentationKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableInternalRepresentationKHR>::value, int>::type = 0>
  39225. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39226. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39227. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39228. VULKAN_HPP_NODISCARD Result getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39229. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39230. template <typename PipelineExecutablePropertiesKHRAllocator = std::allocator<PipelineExecutablePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39231. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39232. template <typename PipelineExecutablePropertiesKHRAllocator = std::allocator<PipelineExecutablePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineExecutablePropertiesKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutablePropertiesKHR>::value, int>::type = 0>
  39233. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39234. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39235. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39236. VULKAN_HPP_NODISCARD Result getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39237. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39238. template <typename PipelineExecutableStatisticKHRAllocator = std::allocator<PipelineExecutableStatisticKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39239. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39240. template <typename PipelineExecutableStatisticKHRAllocator = std::allocator<PipelineExecutableStatisticKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PipelineExecutableStatisticKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableStatisticKHR>::value, int>::type = 0>
  39241. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39242. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39243. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39244. void getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39245. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39246. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39247. VULKAN_HPP_NODISCARD uint64_t getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39248. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39249. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39250. VULKAN_HPP_NODISCARD Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39251. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39252. template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39253. VULKAN_HPP_NODISCARD Result getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> const &data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39254. template <typename T, typename Allocator = std::allocator<T>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39255. VULKAN_HPP_NODISCARD ResultValue<std::vector<T,Allocator>> getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39256. template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39257. VULKAN_HPP_NODISCARD ResultValue<T> getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39258. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39259. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39260. VULKAN_HPP_NODISCARD Result getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39261. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39262. template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39263. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39264. template <typename T, typename Allocator = std::allocator<T>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39265. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T,Allocator>>::type getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39266. template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39267. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39268. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39269. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39270. VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39271. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39272. template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39273. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39274. template <typename T, typename Allocator = std::allocator<T>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39275. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T,Allocator>>::type getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39276. template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39277. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39278. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39279. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39280. VULKAN_HPP_NODISCARD Result getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39281. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39282. template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39283. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> const &data, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39284. template <typename T, typename Allocator = std::allocator<T>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39285. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T,Allocator>>::type getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39286. template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39287. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39288. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39289. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39290. DeviceSize getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39291. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39292. VULKAN_HPP_NODISCARD Result getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39293. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39294. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39295. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39296. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39297. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39298. void getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D* pGranularity, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39299. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39300. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39301. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::Extent2D getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39302. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39303. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39304. VULKAN_HPP_NODISCARD Result getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39305. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39306. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39307. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39308. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39309. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39310. VULKAN_HPP_NODISCARD Result getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39311. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39312. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39313. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39314. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39315. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39316. VULKAN_HPP_NODISCARD Result getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39317. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39318. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39319. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<int>::type getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39320. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39321. #ifdef VK_USE_PLATFORM_WIN32_KHR
  39322. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39323. VULKAN_HPP_NODISCARD Result getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39324. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39325. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39326. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<HANDLE>::type getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39327. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39328. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  39329. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39330. VULKAN_HPP_NODISCARD Result getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39331. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39332. template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39333. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39334. template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Uint8_tAllocator, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
  39335. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39336. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39337. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39338. VULKAN_HPP_NODISCARD Result getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39339. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39340. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39341. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<uint64_t>::type getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39342. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39343. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39344. VULKAN_HPP_NODISCARD Result getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39345. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39346. template <typename ImageAllocator = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39347. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Image, ImageAllocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39348. template <typename ImageAllocator = std::allocator<Image>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = ImageAllocator, typename std::enable_if<std::is_same<typename B::value_type, Image>::value, int>::type = 0>
  39349. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Image, ImageAllocator>>::type getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39350. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
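// Illustrative usage sketch (not part of the generated header): in enhanced mode the overload
// above returns the swapchain images as a std::vector; with VULKAN_HPP_NO_EXCEPTIONS defined
// it instead yields a ResultValue whose .result member must be checked before using .value.
// Assumes a valid vk::SwapchainKHR `swapchain` created elsewhere:
//
//   std::vector<vk::Image> swapchainImages = device.getSwapchainImagesKHR( swapchain );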
  39351. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39352. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39353. VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39354. #else
  39355. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39356. VULKAN_HPP_NODISCARD Result getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39357. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39358. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39359. VULKAN_HPP_NODISCARD Result getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39360. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39361. template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39362. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39363. template <typename Uint8_tAllocator = std::allocator<uint8_t>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Uint8_tAllocator, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type = 0>
  39364. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39365. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39366. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39367. VULKAN_HPP_NODISCARD Result importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39368. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39369. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39370. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39371. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39372. #ifdef VK_USE_PLATFORM_WIN32_KHR
  39373. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39374. VULKAN_HPP_NODISCARD Result importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39375. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39376. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39377. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39378. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39379. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  39380. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39381. VULKAN_HPP_NODISCARD Result importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39382. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39383. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39384. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39385. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39386. #ifdef VK_USE_PLATFORM_WIN32_KHR
  39387. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39388. VULKAN_HPP_NODISCARD Result importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39389. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39390. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39391. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39392. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39393. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  39394. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39395. VULKAN_HPP_NODISCARD Result initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39396. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39397. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39398. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39399. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39400. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39401. VULKAN_HPP_NODISCARD Result invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39402. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39403. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39404. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type invalidateMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39405. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39406. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39407. VULKAN_HPP_NODISCARD Result mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39408. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39409. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39410. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void*>::type mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39411. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
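// Illustrative usage sketch (not part of the generated header): the enhanced mapMemory overload
// returns the mapped pointer directly (throwing on failure unless VULKAN_HPP_NO_EXCEPTIONS is
// defined) and is paired with unmapMemory, declared further below. Assumes `memory` was
// allocated from a HOST_VISIBLE memory type and that `size` bytes of `srcData` are uploaded:
//
//   void * mapped = device.mapMemory( memory, 0, size );
//   std::memcpy( mapped, srcData, static_cast<size_t>( size ) );   // needs <cstring>
//   device.unmapMemory( memory );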
  39412. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39413. VULKAN_HPP_NODISCARD Result mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39414. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39415. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39416. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39417. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39418. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39419. VULKAN_HPP_NODISCARD Result mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39420. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39421. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39422. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39423. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39424. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39425. VULKAN_HPP_NODISCARD Result registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT* pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39426. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39427. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39428. typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39429. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  39430. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39431. VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39432. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  39433. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39434. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39435. VULKAN_HPP_NODISCARD Result registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT* pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39436. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39437. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39438. typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39439. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  39440. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39441. VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39442. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  39443. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39444. #ifdef VK_USE_PLATFORM_WIN32_KHR
  39445. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39446. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39447. VULKAN_HPP_NODISCARD Result releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39448. #else
  39449. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39450. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39451. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39452. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  39453. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39454. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39455. VULKAN_HPP_NODISCARD Result releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39456. #else
  39457. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39458. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39459. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39460. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39461. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39462. VULKAN_HPP_NODISCARD Result release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39463. #else
  39464. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39465. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39466. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39467. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39468. void releaseProfilingLockKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39469. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39470. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39471. VULKAN_HPP_NODISCARD Result resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39472. #else
  39473. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39474. typename ResultValueType<void>::type resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39475. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39476. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39477. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39478. Result resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39479. #else
  39480. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39481. typename ResultValueType<void>::type resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39482. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39483. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39484. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39485. VULKAN_HPP_NODISCARD Result resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39486. #else
  39487. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39488. typename ResultValueType<void>::type resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39489. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39490. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39491. VULKAN_HPP_NODISCARD Result resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39492. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39493. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39494. typename ResultValueType<void>::type resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39495. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39496. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39497. void resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39498. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39499. void resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39500. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39501. VULKAN_HPP_NODISCARD Result setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39502. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39503. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39504. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39505. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39506. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39507. VULKAN_HPP_NODISCARD Result setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39508. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39509. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39510. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39511. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39512. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39513. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39514. VULKAN_HPP_NODISCARD Result setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39515. #else
  39516. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39517. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39518. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39519. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39520. void setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39521. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39522. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39523. void setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains, ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS;
  39524. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39525. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39526. void setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39527. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39528. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39529. VULKAN_HPP_NODISCARD Result setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39530. #else
  39531. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39532. typename ResultValueType<void>::type setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39533. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39534. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39535. VULKAN_HPP_NODISCARD Result signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39536. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39537. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39538. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39539. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39540. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39541. VULKAN_HPP_NODISCARD Result signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39542. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39543. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39544. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39545. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39546. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39547. void trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39548. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39549. void trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39550. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39551. void uninitializePerformanceApiINTEL( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39552. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39553. void unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39554. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39555. void updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39556. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39557. void updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39558. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39559. void updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39560. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39561. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39562. void updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39563. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
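// Illustrative usage sketch (not part of the generated header): the ArrayProxy overload above
// accepts a single struct or a container for the write/copy lists. Assumes a valid
// vk::DescriptorSet `descriptorSet` and a uniform buffer `buffer` created elsewhere:
//
//   vk::DescriptorBufferInfo bufferInfo( buffer, 0, VK_WHOLE_SIZE );
//   vk::WriteDescriptorSet write;
//   write.dstSet          = descriptorSet;
//   write.dstBinding      = 0;
//   write.descriptorCount = 1;
//   write.descriptorType  = vk::DescriptorType::eUniformBuffer;
//   write.pBufferInfo     = &bufferInfo;
//   device.updateDescriptorSets( write, nullptr );   // no descriptor copies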
  39564. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39565. VULKAN_HPP_NODISCARD Result waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39566. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39567. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39568. VULKAN_HPP_NODISCARD Result waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39569. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
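// Illustrative usage sketch (not part of the generated header): the ArrayProxy overload above
// takes a single fence or a container of fences; in enhanced mode it returns
// vk::Result::eSuccess or vk::Result::eTimeout and throws on error codes (unless
// VULKAN_HPP_NO_EXCEPTIONS is defined). Assumes a vk::Fence `fence` signalled by an earlier
// queue submission:
//
//   if ( device.waitForFences( fence, VK_TRUE, UINT64_MAX ) == vk::Result::eSuccess )
//   {
//     device.resetFences( fence );   // reuse the fence for the next submission
//   }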
  39570. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39571. VULKAN_HPP_NODISCARD Result waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39572. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39573. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39574. VULKAN_HPP_NODISCARD Result waitSemaphores( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39575. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  39576. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39577. VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39578. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39579. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39580. VULKAN_HPP_NODISCARD Result waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39581. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
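// Usage sketch (illustrative): waiting for a timeline semaphore to reach a value with the
// enhanced overload. Assumes `device` is a valid vk::Device, `timelineSemaphore` a timeline
// vk::Semaphore and `waitValue` a uint64_t; waitSemaphoresKHR behaves identically and exists for
// the VK_KHR_timeline_semaphore extension:
//
//   vk::SemaphoreWaitInfo waitInfo( {}, 1, &timelineSemaphore, &waitValue );
//   vk::Result result = device.waitSemaphores( waitInfo, UINT64_MAX );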
  39582. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39583. VULKAN_HPP_NODISCARD Result writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void* pData, size_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  39584. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  39585. template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39586. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const &accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, ArrayProxy<T> const &data, size_t stride, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
39586. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, ArrayProxy<T> const & data, size_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39587. template <typename T, typename Allocator = std::allocator<T>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39588. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<std::vector<T,Allocator>>::type writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, size_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39589. template <typename T, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  39590. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<T>::type writeAccelerationStructuresPropertyKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t stride, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  39591. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
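// Usage sketch (illustrative): the single-value overload deduces the result type from its first
// template parameter. Assumes `device` is a valid vk::Device, `accel` a host-queryable
// vk::AccelerationStructureKHR, and that the implementation supports this query type:
//
//   uint64_t compactedSize = device.writeAccelerationStructuresPropertyKHR<uint64_t>(
//       accel, vk::QueryType::eAccelerationStructureCompactedSizeKHR, sizeof( uint64_t ) );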
  39592. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDevice() const VULKAN_HPP_NOEXCEPT
  39593. {
  39594. return m_device;
  39595. }
  39596. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  39597. {
  39598. return m_device != VK_NULL_HANDLE;
  39599. }
  39600. bool operator!() const VULKAN_HPP_NOEXCEPT
  39601. {
  39602. return m_device == VK_NULL_HANDLE;
  39603. }
  39604. private:
  39605. VkDevice m_device;
  39606. };
  39607. static_assert( sizeof( VULKAN_HPP_NAMESPACE::Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
  39608. template <>
  39609. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDevice>
  39610. {
  39611. using type = VULKAN_HPP_NAMESPACE::Device;
  39612. };
  39613. template <>
  39614. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDevice>
  39615. {
  39616. using Type = VULKAN_HPP_NAMESPACE::Device;
  39617. };
  39618. template <>
  39619. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDevice>
  39620. {
  39621. using Type = VULKAN_HPP_NAMESPACE::Device;
  39622. };
  39623. template <>
  39624. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Device>
  39625. {
  39626. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  39627. };
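// Usage sketch (illustrative): vk::Device is a thin wrapper around VkDevice, so it can be tested
// for validity and handed back to plain C Vulkan code. Assumes `device` is a vk::Device:
//
//   if ( device )                                              // explicit operator bool
//   {
//     VkDevice rawHandle = static_cast<VkDevice>( device );    // pass to C interfaces
//   }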
  39628. struct DisplayModeParametersKHR
  39629. {
  39630. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  39631. VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR(VULKAN_HPP_NAMESPACE::Extent2D visibleRegion_ = {}, uint32_t refreshRate_ = {}) VULKAN_HPP_NOEXCEPT
  39632. : visibleRegion( visibleRegion_ ), refreshRate( refreshRate_ )
  39633. {}
  39634. VULKAN_HPP_CONSTEXPR DisplayModeParametersKHR( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  39635. DisplayModeParametersKHR( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  39636. : DisplayModeParametersKHR( *reinterpret_cast<DisplayModeParametersKHR const *>( &rhs ) )
  39637. {}
  39638. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  39639. VULKAN_HPP_CONSTEXPR_14 DisplayModeParametersKHR & operator=( DisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  39640. DisplayModeParametersKHR & operator=( VkDisplayModeParametersKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  39641. {
  39642. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const *>( &rhs );
  39643. return *this;
  39644. }
  39645. DisplayModeParametersKHR & setVisibleRegion( VULKAN_HPP_NAMESPACE::Extent2D const & visibleRegion_ ) VULKAN_HPP_NOEXCEPT
  39646. {
  39647. visibleRegion = visibleRegion_;
  39648. return *this;
  39649. }
  39650. DisplayModeParametersKHR & setRefreshRate( uint32_t refreshRate_ ) VULKAN_HPP_NOEXCEPT
  39651. {
  39652. refreshRate = refreshRate_;
  39653. return *this;
  39654. }
  39655. operator VkDisplayModeParametersKHR const&() const VULKAN_HPP_NOEXCEPT
  39656. {
  39657. return *reinterpret_cast<const VkDisplayModeParametersKHR*>( this );
  39658. }
  39659. operator VkDisplayModeParametersKHR &() VULKAN_HPP_NOEXCEPT
  39660. {
  39661. return *reinterpret_cast<VkDisplayModeParametersKHR*>( this );
  39662. }
  39663. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  39664. auto operator<=>( DisplayModeParametersKHR const& ) const = default;
  39665. #else
  39666. bool operator==( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  39667. {
  39668. return ( visibleRegion == rhs.visibleRegion )
  39669. && ( refreshRate == rhs.refreshRate );
  39670. }
  39671. bool operator!=( DisplayModeParametersKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  39672. {
  39673. return !operator==( rhs );
  39674. }
  39675. #endif
  39676. public:
  39677. VULKAN_HPP_NAMESPACE::Extent2D visibleRegion = {};
  39678. uint32_t refreshRate = {};
  39679. };
  39680. static_assert( sizeof( DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ), "struct and wrapper have different size!" );
  39681. static_assert( std::is_standard_layout<DisplayModeParametersKHR>::value, "struct wrapper is not a standard layout!" );
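// Usage sketch (illustrative): the setters return *this, so parameters can be chained. Note that
// refreshRate is expressed in millihertz (refreshes per second multiplied by 1000):
//
//   vk::DisplayModeParametersKHR params = vk::DisplayModeParametersKHR()
//                                             .setVisibleRegion( vk::Extent2D( 1920, 1080 ) )
//                                             .setRefreshRate( 60000 );   // 60 Hz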
  39682. struct DisplayModeCreateInfoKHR
  39683. {
  39684. static const bool allowDuplicate = false;
  39685. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeCreateInfoKHR;
  39686. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  39687. VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR(VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}) VULKAN_HPP_NOEXCEPT
  39688. : flags( flags_ ), parameters( parameters_ )
  39689. {}
  39690. VULKAN_HPP_CONSTEXPR DisplayModeCreateInfoKHR( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  39691. DisplayModeCreateInfoKHR( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  39692. : DisplayModeCreateInfoKHR( *reinterpret_cast<DisplayModeCreateInfoKHR const *>( &rhs ) )
  39693. {}
  39694. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  39695. VULKAN_HPP_CONSTEXPR_14 DisplayModeCreateInfoKHR & operator=( DisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  39696. DisplayModeCreateInfoKHR & operator=( VkDisplayModeCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  39697. {
  39698. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR const *>( &rhs );
  39699. return *this;
  39700. }
  39701. DisplayModeCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  39702. {
  39703. pNext = pNext_;
  39704. return *this;
  39705. }
  39706. DisplayModeCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
  39707. {
  39708. flags = flags_;
  39709. return *this;
  39710. }
  39711. DisplayModeCreateInfoKHR & setParameters( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR const & parameters_ ) VULKAN_HPP_NOEXCEPT
  39712. {
  39713. parameters = parameters_;
  39714. return *this;
  39715. }
  39716. operator VkDisplayModeCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  39717. {
  39718. return *reinterpret_cast<const VkDisplayModeCreateInfoKHR*>( this );
  39719. }
  39720. operator VkDisplayModeCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
  39721. {
  39722. return *reinterpret_cast<VkDisplayModeCreateInfoKHR*>( this );
  39723. }
  39724. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  39725. auto operator<=>( DisplayModeCreateInfoKHR const& ) const = default;
  39726. #else
  39727. bool operator==( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  39728. {
  39729. return ( sType == rhs.sType )
  39730. && ( pNext == rhs.pNext )
  39731. && ( flags == rhs.flags )
  39732. && ( parameters == rhs.parameters );
  39733. }
  39734. bool operator!=( DisplayModeCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  39735. {
  39736. return !operator==( rhs );
  39737. }
  39738. #endif
  39739. public:
  39740. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeCreateInfoKHR;
  39741. const void* pNext = {};
  39742. VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR flags = {};
  39743. VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
  39744. };
  39745. static_assert( sizeof( DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ), "struct and wrapper have different size!" );
  39746. static_assert( std::is_standard_layout<DisplayModeCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  39747. template <>
  39748. struct CppType<StructureType, StructureType::eDisplayModeCreateInfoKHR>
  39749. {
  39750. using Type = DisplayModeCreateInfoKHR;
  39751. };
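// Usage sketch (illustrative): creating a display mode from the parameters above. Assumes
// `physicalDevice` is a valid vk::PhysicalDevice, `display` a vk::DisplayKHR, and that
// PhysicalDevice::createDisplayModeKHR (declared elsewhere in this header) is available with
// exceptions enabled:
//
//   vk::DisplayModeCreateInfoKHR createInfo( {}, params );
//   vk::DisplayModeKHR mode = physicalDevice.createDisplayModeKHR( display, createInfo );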
  39752. class DisplayModeKHR
  39753. {
  39754. public:
  39755. using CType = VkDisplayModeKHR;
  39756. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR;
  39757. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR;
  39758. public:
  39759. VULKAN_HPP_CONSTEXPR DisplayModeKHR() VULKAN_HPP_NOEXCEPT
  39760. : m_displayModeKHR(VK_NULL_HANDLE)
  39761. {}
  39762. VULKAN_HPP_CONSTEXPR DisplayModeKHR( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  39763. : m_displayModeKHR(VK_NULL_HANDLE)
  39764. {}
  39765. VULKAN_HPP_TYPESAFE_EXPLICIT DisplayModeKHR( VkDisplayModeKHR displayModeKHR ) VULKAN_HPP_NOEXCEPT
  39766. : m_displayModeKHR( displayModeKHR )
  39767. {}
  39768. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  39769. DisplayModeKHR & operator=(VkDisplayModeKHR displayModeKHR) VULKAN_HPP_NOEXCEPT
  39770. {
  39771. m_displayModeKHR = displayModeKHR;
  39772. return *this;
  39773. }
  39774. #endif
  39775. DisplayModeKHR & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  39776. {
  39777. m_displayModeKHR = VK_NULL_HANDLE;
  39778. return *this;
  39779. }
  39780. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  39781. auto operator<=>( DisplayModeKHR const& ) const = default;
  39782. #else
  39783. bool operator==( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
  39784. {
  39785. return m_displayModeKHR == rhs.m_displayModeKHR;
  39786. }
39787. bool operator!=( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
39788. {
39789. return m_displayModeKHR != rhs.m_displayModeKHR;
39790. }
39791. bool operator<( DisplayModeKHR const & rhs ) const VULKAN_HPP_NOEXCEPT
39792. {
39793. return m_displayModeKHR < rhs.m_displayModeKHR;
39794. }
  39795. #endif
  39796. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDisplayModeKHR() const VULKAN_HPP_NOEXCEPT
  39797. {
  39798. return m_displayModeKHR;
  39799. }
  39800. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  39801. {
  39802. return m_displayModeKHR != VK_NULL_HANDLE;
  39803. }
  39804. bool operator!() const VULKAN_HPP_NOEXCEPT
  39805. {
  39806. return m_displayModeKHR == VK_NULL_HANDLE;
  39807. }
  39808. private:
  39809. VkDisplayModeKHR m_displayModeKHR;
  39810. };
  39811. static_assert( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" );
  39812. template <>
  39813. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDisplayModeKHR>
  39814. {
  39815. using type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
  39816. };
  39817. template <>
  39818. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDisplayModeKHR>
  39819. {
  39820. using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
  39821. };
  39822. template <>
  39823. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDisplayModeKHR>
  39824. {
  39825. using Type = VULKAN_HPP_NAMESPACE::DisplayModeKHR;
  39826. };
  39827. template <>
  39828. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>
  39829. {
  39830. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  39831. };
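// Usage sketch (illustrative): the comparison operators above make the handle usable as a key in
// ordered containers (requires <set>). Assumes `mode` is a vk::DisplayModeKHR:
//
//   std::set<vk::DisplayModeKHR> seenModes;
//   seenModes.insert( mode );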
  39832. struct ExtensionProperties
  39833. {
  39834. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  39835. VULKAN_HPP_CONSTEXPR_14 ExtensionProperties(std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& extensionName_ = {}, uint32_t specVersion_ = {}) VULKAN_HPP_NOEXCEPT
  39836. : extensionName( extensionName_ ), specVersion( specVersion_ )
  39837. {}
  39838. VULKAN_HPP_CONSTEXPR_14 ExtensionProperties( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  39839. ExtensionProperties( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  39840. : ExtensionProperties( *reinterpret_cast<ExtensionProperties const *>( &rhs ) )
  39841. {}
  39842. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  39843. VULKAN_HPP_CONSTEXPR_14 ExtensionProperties & operator=( ExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  39844. ExtensionProperties & operator=( VkExtensionProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  39845. {
  39846. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExtensionProperties const *>( &rhs );
  39847. return *this;
  39848. }
  39849. operator VkExtensionProperties const&() const VULKAN_HPP_NOEXCEPT
  39850. {
  39851. return *reinterpret_cast<const VkExtensionProperties*>( this );
  39852. }
  39853. operator VkExtensionProperties &() VULKAN_HPP_NOEXCEPT
  39854. {
  39855. return *reinterpret_cast<VkExtensionProperties*>( this );
  39856. }
  39857. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  39858. auto operator<=>( ExtensionProperties const& ) const = default;
  39859. #else
  39860. bool operator==( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  39861. {
  39862. return ( extensionName == rhs.extensionName )
  39863. && ( specVersion == rhs.specVersion );
  39864. }
  39865. bool operator!=( ExtensionProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  39866. {
  39867. return !operator==( rhs );
  39868. }
  39869. #endif
  39870. public:
  39871. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> extensionName = {};
  39872. uint32_t specVersion = {};
  39873. };
  39874. static_assert( sizeof( ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" );
  39875. static_assert( std::is_standard_layout<ExtensionProperties>::value, "struct wrapper is not a standard layout!" );
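// Usage sketch (illustrative): extensionName is an ArrayWrapper1D<char, N>, which still exposes
// std::array's data(). Assumes vk::enumerateInstanceExtensionProperties (declared elsewhere in
// this header) and <cstring> are available:
//
//   for ( vk::ExtensionProperties const & ext : vk::enumerateInstanceExtensionProperties() )
//   {
//     if ( strcmp( ext.extensionName.data(), VK_KHR_SURFACE_EXTENSION_NAME ) == 0 )
//     {
//       // VK_KHR_surface is available
//     }
//   }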
  39876. struct LayerProperties
  39877. {
  39878. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  39879. VULKAN_HPP_CONSTEXPR_14 LayerProperties(std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& layerName_ = {}, uint32_t specVersion_ = {}, uint32_t implementationVersion_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {}) VULKAN_HPP_NOEXCEPT
  39880. : layerName( layerName_ ), specVersion( specVersion_ ), implementationVersion( implementationVersion_ ), description( description_ )
  39881. {}
  39882. VULKAN_HPP_CONSTEXPR_14 LayerProperties( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  39883. LayerProperties( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  39884. : LayerProperties( *reinterpret_cast<LayerProperties const *>( &rhs ) )
  39885. {}
  39886. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  39887. VULKAN_HPP_CONSTEXPR_14 LayerProperties & operator=( LayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  39888. LayerProperties & operator=( VkLayerProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  39889. {
  39890. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::LayerProperties const *>( &rhs );
  39891. return *this;
  39892. }
  39893. operator VkLayerProperties const&() const VULKAN_HPP_NOEXCEPT
  39894. {
  39895. return *reinterpret_cast<const VkLayerProperties*>( this );
  39896. }
  39897. operator VkLayerProperties &() VULKAN_HPP_NOEXCEPT
  39898. {
  39899. return *reinterpret_cast<VkLayerProperties*>( this );
  39900. }
  39901. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  39902. auto operator<=>( LayerProperties const& ) const = default;
  39903. #else
  39904. bool operator==( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  39905. {
  39906. return ( layerName == rhs.layerName )
  39907. && ( specVersion == rhs.specVersion )
  39908. && ( implementationVersion == rhs.implementationVersion )
  39909. && ( description == rhs.description );
  39910. }
  39911. bool operator!=( LayerProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  39912. {
  39913. return !operator==( rhs );
  39914. }
  39915. #endif
  39916. public:
  39917. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layerName = {};
  39918. uint32_t specVersion = {};
  39919. uint32_t implementationVersion = {};
  39920. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
  39921. };
  39922. static_assert( sizeof( LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
  39923. static_assert( std::is_standard_layout<LayerProperties>::value, "struct wrapper is not a standard layout!" );
  39924. struct PerformanceCounterKHR
  39925. {
  39926. static const bool allowDuplicate = false;
  39927. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterKHR;
  39928. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  39929. VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR(VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit_ = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric, VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope_ = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer, VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage_ = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32, std::array<uint8_t,VK_UUID_SIZE> const& uuid_ = {}) VULKAN_HPP_NOEXCEPT
  39930. : unit( unit_ ), scope( scope_ ), storage( storage_ ), uuid( uuid_ )
  39931. {}
  39932. VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  39933. PerformanceCounterKHR( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  39934. : PerformanceCounterKHR( *reinterpret_cast<PerformanceCounterKHR const *>( &rhs ) )
  39935. {}
  39936. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  39937. VULKAN_HPP_CONSTEXPR_14 PerformanceCounterKHR & operator=( PerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  39938. PerformanceCounterKHR & operator=( VkPerformanceCounterKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  39939. {
  39940. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR const *>( &rhs );
  39941. return *this;
  39942. }
  39943. operator VkPerformanceCounterKHR const&() const VULKAN_HPP_NOEXCEPT
  39944. {
  39945. return *reinterpret_cast<const VkPerformanceCounterKHR*>( this );
  39946. }
  39947. operator VkPerformanceCounterKHR &() VULKAN_HPP_NOEXCEPT
  39948. {
  39949. return *reinterpret_cast<VkPerformanceCounterKHR*>( this );
  39950. }
  39951. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  39952. auto operator<=>( PerformanceCounterKHR const& ) const = default;
  39953. #else
  39954. bool operator==( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  39955. {
  39956. return ( sType == rhs.sType )
  39957. && ( pNext == rhs.pNext )
  39958. && ( unit == rhs.unit )
  39959. && ( scope == rhs.scope )
  39960. && ( storage == rhs.storage )
  39961. && ( uuid == rhs.uuid );
  39962. }
  39963. bool operator!=( PerformanceCounterKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  39964. {
  39965. return !operator==( rhs );
  39966. }
  39967. #endif
  39968. public:
  39969. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterKHR;
  39970. const void* pNext = {};
  39971. VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR unit = VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR::eGeneric;
  39972. VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR scope = VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR::eCommandBuffer;
  39973. VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR storage = VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR::eInt32;
  39974. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> uuid = {};
  39975. };
  39976. static_assert( sizeof( PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ), "struct and wrapper have different size!" );
  39977. static_assert( std::is_standard_layout<PerformanceCounterKHR>::value, "struct wrapper is not a standard layout!" );
  39978. template <>
  39979. struct CppType<StructureType, StructureType::ePerformanceCounterKHR>
  39980. {
  39981. using Type = PerformanceCounterKHR;
  39982. };
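// Usage sketch (illustrative): counters obtained from the VK_KHR_performance_query enumeration
// entry point (vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR) can be filtered
// by their fields. Assumes `counter` is a vk::PerformanceCounterKHR obtained that way:
//
//   bool perCommandBuffer = ( counter.scope   == vk::PerformanceCounterScopeKHR::eCommandBuffer );
//   bool is32BitInt       = ( counter.storage == vk::PerformanceCounterStorageKHR::eInt32 );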
  39983. struct PerformanceCounterDescriptionKHR
  39984. {
  39985. static const bool allowDuplicate = false;
  39986. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceCounterDescriptionKHR;
  39987. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  39988. VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR(VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& name_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& category_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {}) VULKAN_HPP_NOEXCEPT
  39989. : flags( flags_ ), name( name_ ), category( category_ ), description( description_ )
  39990. {}
  39991. VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  39992. PerformanceCounterDescriptionKHR( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  39993. : PerformanceCounterDescriptionKHR( *reinterpret_cast<PerformanceCounterDescriptionKHR const *>( &rhs ) )
  39994. {}
  39995. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  39996. VULKAN_HPP_CONSTEXPR_14 PerformanceCounterDescriptionKHR & operator=( PerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  39997. PerformanceCounterDescriptionKHR & operator=( VkPerformanceCounterDescriptionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  39998. {
  39999. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR const *>( &rhs );
  40000. return *this;
  40001. }
  40002. operator VkPerformanceCounterDescriptionKHR const&() const VULKAN_HPP_NOEXCEPT
  40003. {
  40004. return *reinterpret_cast<const VkPerformanceCounterDescriptionKHR*>( this );
  40005. }
  40006. operator VkPerformanceCounterDescriptionKHR &() VULKAN_HPP_NOEXCEPT
  40007. {
  40008. return *reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( this );
  40009. }
  40010. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  40011. auto operator<=>( PerformanceCounterDescriptionKHR const& ) const = default;
  40012. #else
  40013. bool operator==( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40014. {
  40015. return ( sType == rhs.sType )
  40016. && ( pNext == rhs.pNext )
  40017. && ( flags == rhs.flags )
  40018. && ( name == rhs.name )
  40019. && ( category == rhs.category )
  40020. && ( description == rhs.description );
  40021. }
  40022. bool operator!=( PerformanceCounterDescriptionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40023. {
  40024. return !operator==( rhs );
  40025. }
  40026. #endif
  40027. public:
  40028. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceCounterDescriptionKHR;
  40029. const void* pNext = {};
  40030. VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR flags = {};
  40031. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> name = {};
  40032. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> category = {};
  40033. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
  40034. };
  40035. static_assert( sizeof( PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ), "struct and wrapper have different size!" );
  40036. static_assert( std::is_standard_layout<PerformanceCounterDescriptionKHR>::value, "struct wrapper is not a standard layout!" );
  40037. template <>
  40038. struct CppType<StructureType, StructureType::ePerformanceCounterDescriptionKHR>
  40039. {
  40040. using Type = PerformanceCounterDescriptionKHR;
  40041. };
  40042. struct DisplayModePropertiesKHR
  40043. {
  40044. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40045. VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters_ = {}) VULKAN_HPP_NOEXCEPT
  40046. : displayMode( displayMode_ ), parameters( parameters_ )
  40047. {}
  40048. VULKAN_HPP_CONSTEXPR DisplayModePropertiesKHR( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40049. DisplayModePropertiesKHR( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  40050. : DisplayModePropertiesKHR( *reinterpret_cast<DisplayModePropertiesKHR const *>( &rhs ) )
  40051. {}
  40052. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40053. VULKAN_HPP_CONSTEXPR_14 DisplayModePropertiesKHR & operator=( DisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40054. DisplayModePropertiesKHR & operator=( VkDisplayModePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  40055. {
  40056. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR const *>( &rhs );
  40057. return *this;
  40058. }
  40059. operator VkDisplayModePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
  40060. {
  40061. return *reinterpret_cast<const VkDisplayModePropertiesKHR*>( this );
  40062. }
  40063. operator VkDisplayModePropertiesKHR &() VULKAN_HPP_NOEXCEPT
  40064. {
  40065. return *reinterpret_cast<VkDisplayModePropertiesKHR*>( this );
  40066. }
  40067. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  40068. auto operator<=>( DisplayModePropertiesKHR const& ) const = default;
  40069. #else
  40070. bool operator==( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40071. {
  40072. return ( displayMode == rhs.displayMode )
  40073. && ( parameters == rhs.parameters );
  40074. }
  40075. bool operator!=( DisplayModePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40076. {
  40077. return !operator==( rhs );
  40078. }
  40079. #endif
  40080. public:
  40081. VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
  40082. VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR parameters = {};
  40083. };
  40084. static_assert( sizeof( DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ), "struct and wrapper have different size!" );
  40085. static_assert( std::is_standard_layout<DisplayModePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
  40086. struct DisplayModeProperties2KHR
  40087. {
  40088. static const bool allowDuplicate = false;
  40089. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayModeProperties2KHR;
  40090. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40091. VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties_ = {}) VULKAN_HPP_NOEXCEPT
  40092. : displayModeProperties( displayModeProperties_ )
  40093. {}
  40094. VULKAN_HPP_CONSTEXPR DisplayModeProperties2KHR( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40095. DisplayModeProperties2KHR( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  40096. : DisplayModeProperties2KHR( *reinterpret_cast<DisplayModeProperties2KHR const *>( &rhs ) )
  40097. {}
  40098. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40099. VULKAN_HPP_CONSTEXPR_14 DisplayModeProperties2KHR & operator=( DisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40100. DisplayModeProperties2KHR & operator=( VkDisplayModeProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  40101. {
  40102. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR const *>( &rhs );
  40103. return *this;
  40104. }
  40105. operator VkDisplayModeProperties2KHR const&() const VULKAN_HPP_NOEXCEPT
  40106. {
  40107. return *reinterpret_cast<const VkDisplayModeProperties2KHR*>( this );
  40108. }
  40109. operator VkDisplayModeProperties2KHR &() VULKAN_HPP_NOEXCEPT
  40110. {
  40111. return *reinterpret_cast<VkDisplayModeProperties2KHR*>( this );
  40112. }
  40113. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  40114. auto operator<=>( DisplayModeProperties2KHR const& ) const = default;
  40115. #else
  40116. bool operator==( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40117. {
  40118. return ( sType == rhs.sType )
  40119. && ( pNext == rhs.pNext )
  40120. && ( displayModeProperties == rhs.displayModeProperties );
  40121. }
  40122. bool operator!=( DisplayModeProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40123. {
  40124. return !operator==( rhs );
  40125. }
  40126. #endif
  40127. public:
  40128. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayModeProperties2KHR;
  40129. void* pNext = {};
  40130. VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR displayModeProperties = {};
  40131. };
  40132. static_assert( sizeof( DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ), "struct and wrapper have different size!" );
  40133. static_assert( std::is_standard_layout<DisplayModeProperties2KHR>::value, "struct wrapper is not a standard layout!" );
  40134. template <>
  40135. struct CppType<StructureType, StructureType::eDisplayModeProperties2KHR>
  40136. {
  40137. using Type = DisplayModeProperties2KHR;
  40138. };
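// Usage sketch (illustrative): the *2KHR variant wraps DisplayModePropertiesKHR and adds a pNext
// chain. Assumes `physicalDevice` and `display` are valid, and that
// PhysicalDevice::getDisplayModeProperties2KHR (declared elsewhere in this header) is available:
//
//   for ( vk::DisplayModeProperties2KHR const & p : physicalDevice.getDisplayModeProperties2KHR( display ) )
//   {
//     vk::DisplayModeParametersKHR const & params = p.displayModeProperties.parameters;
//     // params.visibleRegion / params.refreshRate describe one supported mode
//   }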
  40139. struct DisplayPlaneInfo2KHR
  40140. {
  40141. static const bool allowDuplicate = false;
  40142. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneInfo2KHR;
  40143. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40144. VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR(VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ = {}, uint32_t planeIndex_ = {}) VULKAN_HPP_NOEXCEPT
  40145. : mode( mode_ ), planeIndex( planeIndex_ )
  40146. {}
  40147. VULKAN_HPP_CONSTEXPR DisplayPlaneInfo2KHR( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40148. DisplayPlaneInfo2KHR( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  40149. : DisplayPlaneInfo2KHR( *reinterpret_cast<DisplayPlaneInfo2KHR const *>( &rhs ) )
  40150. {}
  40151. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40152. VULKAN_HPP_CONSTEXPR_14 DisplayPlaneInfo2KHR & operator=( DisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40153. DisplayPlaneInfo2KHR & operator=( VkDisplayPlaneInfo2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  40154. {
  40155. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR const *>( &rhs );
  40156. return *this;
  40157. }
  40158. DisplayPlaneInfo2KHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  40159. {
  40160. pNext = pNext_;
  40161. return *this;
  40162. }
  40163. DisplayPlaneInfo2KHR & setMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode_ ) VULKAN_HPP_NOEXCEPT
  40164. {
  40165. mode = mode_;
  40166. return *this;
  40167. }
  40168. DisplayPlaneInfo2KHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
  40169. {
  40170. planeIndex = planeIndex_;
  40171. return *this;
  40172. }
  40173. operator VkDisplayPlaneInfo2KHR const&() const VULKAN_HPP_NOEXCEPT
  40174. {
  40175. return *reinterpret_cast<const VkDisplayPlaneInfo2KHR*>( this );
  40176. }
  40177. operator VkDisplayPlaneInfo2KHR &() VULKAN_HPP_NOEXCEPT
  40178. {
  40179. return *reinterpret_cast<VkDisplayPlaneInfo2KHR*>( this );
  40180. }
  40181. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  40182. auto operator<=>( DisplayPlaneInfo2KHR const& ) const = default;
  40183. #else
  40184. bool operator==( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40185. {
  40186. return ( sType == rhs.sType )
  40187. && ( pNext == rhs.pNext )
  40188. && ( mode == rhs.mode )
  40189. && ( planeIndex == rhs.planeIndex );
  40190. }
  40191. bool operator!=( DisplayPlaneInfo2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40192. {
  40193. return !operator==( rhs );
  40194. }
  40195. #endif
  40196. public:
  40197. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneInfo2KHR;
  40198. const void* pNext = {};
  40199. VULKAN_HPP_NAMESPACE::DisplayModeKHR mode = {};
  40200. uint32_t planeIndex = {};
  40201. };
  40202. static_assert( sizeof( DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "struct and wrapper have different size!" );
  40203. static_assert( std::is_standard_layout<DisplayPlaneInfo2KHR>::value, "struct wrapper is not a standard layout!" );
  40204. template <>
  40205. struct CppType<StructureType, StructureType::eDisplayPlaneInfo2KHR>
  40206. {
  40207. using Type = DisplayPlaneInfo2KHR;
  40208. };
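// Usage sketch (illustrative): DisplayPlaneInfo2KHR is the input to the plane capabilities query.
// Assumes `physicalDevice` and `mode` are valid, and that
// PhysicalDevice::getDisplayPlaneCapabilities2KHR (declared elsewhere in this header) is available:
//
//   vk::DisplayPlaneInfo2KHR planeInfo = vk::DisplayPlaneInfo2KHR()
//                                            .setMode( mode )
//                                            .setPlaneIndex( 0 );
//   vk::DisplayPlaneCapabilities2KHR caps = physicalDevice.getDisplayPlaneCapabilities2KHR( planeInfo );
//   // caps.capabilities: supportedAlpha plus min/max source and destination rectangles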
  40209. struct DisplayPlaneCapabilitiesKHR
  40210. {
  40211. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40212. VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR(VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha_ = {}, VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition_ = {}, VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent_ = {}, VULKAN_HPP_NAMESPACE::Offset2D minDstPosition_ = {}, VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minDstExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent_ = {}) VULKAN_HPP_NOEXCEPT
  40213. : supportedAlpha( supportedAlpha_ ), minSrcPosition( minSrcPosition_ ), maxSrcPosition( maxSrcPosition_ ), minSrcExtent( minSrcExtent_ ), maxSrcExtent( maxSrcExtent_ ), minDstPosition( minDstPosition_ ), maxDstPosition( maxDstPosition_ ), minDstExtent( minDstExtent_ ), maxDstExtent( maxDstExtent_ )
  40214. {}
  40215. VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilitiesKHR( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40216. DisplayPlaneCapabilitiesKHR( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  40217. : DisplayPlaneCapabilitiesKHR( *reinterpret_cast<DisplayPlaneCapabilitiesKHR const *>( &rhs ) )
  40218. {}
  40219. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40220. VULKAN_HPP_CONSTEXPR_14 DisplayPlaneCapabilitiesKHR & operator=( DisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40221. DisplayPlaneCapabilitiesKHR & operator=( VkDisplayPlaneCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  40222. {
  40223. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR const *>( &rhs );
  40224. return *this;
  40225. }
  40226. operator VkDisplayPlaneCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
  40227. {
  40228. return *reinterpret_cast<const VkDisplayPlaneCapabilitiesKHR*>( this );
  40229. }
  40230. operator VkDisplayPlaneCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
  40231. {
  40232. return *reinterpret_cast<VkDisplayPlaneCapabilitiesKHR*>( this );
  40233. }
  40234. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  40235. auto operator<=>( DisplayPlaneCapabilitiesKHR const& ) const = default;
  40236. #else
  40237. bool operator==( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40238. {
  40239. return ( supportedAlpha == rhs.supportedAlpha )
  40240. && ( minSrcPosition == rhs.minSrcPosition )
  40241. && ( maxSrcPosition == rhs.maxSrcPosition )
  40242. && ( minSrcExtent == rhs.minSrcExtent )
  40243. && ( maxSrcExtent == rhs.maxSrcExtent )
  40244. && ( minDstPosition == rhs.minDstPosition )
  40245. && ( maxDstPosition == rhs.maxDstPosition )
  40246. && ( minDstExtent == rhs.minDstExtent )
  40247. && ( maxDstExtent == rhs.maxDstExtent );
  40248. }
  40249. bool operator!=( DisplayPlaneCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40250. {
  40251. return !operator==( rhs );
  40252. }
  40253. #endif
  40254. public:
  40255. VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR supportedAlpha = {};
  40256. VULKAN_HPP_NAMESPACE::Offset2D minSrcPosition = {};
  40257. VULKAN_HPP_NAMESPACE::Offset2D maxSrcPosition = {};
  40258. VULKAN_HPP_NAMESPACE::Extent2D minSrcExtent = {};
  40259. VULKAN_HPP_NAMESPACE::Extent2D maxSrcExtent = {};
  40260. VULKAN_HPP_NAMESPACE::Offset2D minDstPosition = {};
  40261. VULKAN_HPP_NAMESPACE::Offset2D maxDstPosition = {};
  40262. VULKAN_HPP_NAMESPACE::Extent2D minDstExtent = {};
  40263. VULKAN_HPP_NAMESPACE::Extent2D maxDstExtent = {};
  40264. };
  40265. static_assert( sizeof( DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ), "struct and wrapper have different size!" );
  40266. static_assert( std::is_standard_layout<DisplayPlaneCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
  40267. struct DisplayPlaneCapabilities2KHR
  40268. {
  40269. static const bool allowDuplicate = false;
  40270. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneCapabilities2KHR;
  40271. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40272. VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR(VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities_ = {}) VULKAN_HPP_NOEXCEPT
  40273. : capabilities( capabilities_ )
  40274. {}
  40275. VULKAN_HPP_CONSTEXPR DisplayPlaneCapabilities2KHR( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40276. DisplayPlaneCapabilities2KHR( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  40277. : DisplayPlaneCapabilities2KHR( *reinterpret_cast<DisplayPlaneCapabilities2KHR const *>( &rhs ) )
  40278. {}
  40279. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40280. VULKAN_HPP_CONSTEXPR_14 DisplayPlaneCapabilities2KHR & operator=( DisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40281. DisplayPlaneCapabilities2KHR & operator=( VkDisplayPlaneCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  40282. {
  40283. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR const *>( &rhs );
  40284. return *this;
  40285. }
  40286. operator VkDisplayPlaneCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT
  40287. {
  40288. return *reinterpret_cast<const VkDisplayPlaneCapabilities2KHR*>( this );
  40289. }
  40290. operator VkDisplayPlaneCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
  40291. {
  40292. return *reinterpret_cast<VkDisplayPlaneCapabilities2KHR*>( this );
  40293. }
  40294. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  40295. auto operator<=>( DisplayPlaneCapabilities2KHR const& ) const = default;
  40296. #else
  40297. bool operator==( DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40298. {
  40299. return ( sType == rhs.sType )
  40300. && ( pNext == rhs.pNext )
  40301. && ( capabilities == rhs.capabilities );
  40302. }
  40303. bool operator!=( DisplayPlaneCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40304. {
  40305. return !operator==( rhs );
  40306. }
  40307. #endif
  40308. public:
  40309. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneCapabilities2KHR;
  40310. void* pNext = {};
  40311. VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities = {};
  40312. };
  40313. static_assert( sizeof( DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ), "struct and wrapper have different size!" );
  40314. static_assert( std::is_standard_layout<DisplayPlaneCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
  40315. template <>
  40316. struct CppType<StructureType, StructureType::eDisplayPlaneCapabilities2KHR>
  40317. {
  40318. using Type = DisplayPlaneCapabilities2KHR;
  40319. };
  40320. struct DisplayPlanePropertiesKHR
  40321. {
  40322. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40323. VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay_ = {}, uint32_t currentStackIndex_ = {}) VULKAN_HPP_NOEXCEPT
  40324. : currentDisplay( currentDisplay_ ), currentStackIndex( currentStackIndex_ )
  40325. {}
  40326. VULKAN_HPP_CONSTEXPR DisplayPlanePropertiesKHR( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40327. DisplayPlanePropertiesKHR( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  40328. : DisplayPlanePropertiesKHR( *reinterpret_cast<DisplayPlanePropertiesKHR const *>( &rhs ) )
  40329. {}
  40330. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40331. VULKAN_HPP_CONSTEXPR_14 DisplayPlanePropertiesKHR & operator=( DisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40332. DisplayPlanePropertiesKHR & operator=( VkDisplayPlanePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  40333. {
  40334. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR const *>( &rhs );
  40335. return *this;
  40336. }
  40337. operator VkDisplayPlanePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
  40338. {
  40339. return *reinterpret_cast<const VkDisplayPlanePropertiesKHR*>( this );
  40340. }
  40341. operator VkDisplayPlanePropertiesKHR &() VULKAN_HPP_NOEXCEPT
  40342. {
  40343. return *reinterpret_cast<VkDisplayPlanePropertiesKHR*>( this );
  40344. }
  40345. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  40346. auto operator<=>( DisplayPlanePropertiesKHR const& ) const = default;
  40347. #else
  40348. bool operator==( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40349. {
  40350. return ( currentDisplay == rhs.currentDisplay )
  40351. && ( currentStackIndex == rhs.currentStackIndex );
  40352. }
  40353. bool operator!=( DisplayPlanePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40354. {
  40355. return !operator==( rhs );
  40356. }
  40357. #endif
  40358. public:
  40359. VULKAN_HPP_NAMESPACE::DisplayKHR currentDisplay = {};
  40360. uint32_t currentStackIndex = {};
  40361. };
  40362. static_assert( sizeof( DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ), "struct and wrapper have different size!" );
  40363. static_assert( std::is_standard_layout<DisplayPlanePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
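// Usage sketch (illustrative): enumerating planes and checking which display each one is currently
// attached to. Assumes PhysicalDevice::getDisplayPlanePropertiesKHR (declared elsewhere in this
// header) is available:
//
//   for ( vk::DisplayPlanePropertiesKHR const & plane : physicalDevice.getDisplayPlanePropertiesKHR() )
//   {
//     if ( plane.currentDisplay )   // VK_NULL_HANDLE means the plane is not bound to a display
//     {
//       // plane.currentStackIndex gives its position in the plane stack
//     }
//   }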
  40364. struct DisplayPlaneProperties2KHR
  40365. {
  40366. static const bool allowDuplicate = false;
  40367. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPlaneProperties2KHR;
  40368. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40369. VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties_ = {}) VULKAN_HPP_NOEXCEPT
  40370. : displayPlaneProperties( displayPlaneProperties_ )
  40371. {}
  40372. VULKAN_HPP_CONSTEXPR DisplayPlaneProperties2KHR( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40373. DisplayPlaneProperties2KHR( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  40374. : DisplayPlaneProperties2KHR( *reinterpret_cast<DisplayPlaneProperties2KHR const *>( &rhs ) )
  40375. {}
  40376. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40377. VULKAN_HPP_CONSTEXPR_14 DisplayPlaneProperties2KHR & operator=( DisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40378. DisplayPlaneProperties2KHR & operator=( VkDisplayPlaneProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  40379. {
  40380. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR const *>( &rhs );
  40381. return *this;
  40382. }
  40383. operator VkDisplayPlaneProperties2KHR const&() const VULKAN_HPP_NOEXCEPT
  40384. {
  40385. return *reinterpret_cast<const VkDisplayPlaneProperties2KHR*>( this );
  40386. }
  40387. operator VkDisplayPlaneProperties2KHR &() VULKAN_HPP_NOEXCEPT
  40388. {
  40389. return *reinterpret_cast<VkDisplayPlaneProperties2KHR*>( this );
  40390. }
  40391. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  40392. auto operator<=>( DisplayPlaneProperties2KHR const& ) const = default;
  40393. #else
  40394. bool operator==( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40395. {
  40396. return ( sType == rhs.sType )
  40397. && ( pNext == rhs.pNext )
  40398. && ( displayPlaneProperties == rhs.displayPlaneProperties );
  40399. }
  40400. bool operator!=( DisplayPlaneProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40401. {
  40402. return !operator==( rhs );
  40403. }
  40404. #endif
  40405. public:
  40406. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPlaneProperties2KHR;
  40407. void* pNext = {};
  40408. VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR displayPlaneProperties = {};
  40409. };
  40410. static_assert( sizeof( DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ), "struct and wrapper have different size!" );
  40411. static_assert( std::is_standard_layout<DisplayPlaneProperties2KHR>::value, "struct wrapper is not a standard layout!" );
  40412. template <>
  40413. struct CppType<StructureType, StructureType::eDisplayPlaneProperties2KHR>
  40414. {
  40415. using Type = DisplayPlaneProperties2KHR;
  40416. };
  40417. struct DisplayPropertiesKHR
  40418. {
  40419. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40420. VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR(VULKAN_HPP_NAMESPACE::DisplayKHR display_ = {}, const char* displayName_ = {}, VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions_ = {}, VULKAN_HPP_NAMESPACE::Extent2D physicalResolution_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible_ = {}, VULKAN_HPP_NAMESPACE::Bool32 persistentContent_ = {}) VULKAN_HPP_NOEXCEPT
  40421. : display( display_ ), displayName( displayName_ ), physicalDimensions( physicalDimensions_ ), physicalResolution( physicalResolution_ ), supportedTransforms( supportedTransforms_ ), planeReorderPossible( planeReorderPossible_ ), persistentContent( persistentContent_ )
  40422. {}
  40423. VULKAN_HPP_CONSTEXPR DisplayPropertiesKHR( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40424. DisplayPropertiesKHR( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  40425. : DisplayPropertiesKHR( *reinterpret_cast<DisplayPropertiesKHR const *>( &rhs ) )
  40426. {}
  40427. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40428. VULKAN_HPP_CONSTEXPR_14 DisplayPropertiesKHR & operator=( DisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40429. DisplayPropertiesKHR & operator=( VkDisplayPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  40430. {
  40431. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR const *>( &rhs );
  40432. return *this;
  40433. }
  40434. operator VkDisplayPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
  40435. {
  40436. return *reinterpret_cast<const VkDisplayPropertiesKHR*>( this );
  40437. }
  40438. operator VkDisplayPropertiesKHR &() VULKAN_HPP_NOEXCEPT
  40439. {
  40440. return *reinterpret_cast<VkDisplayPropertiesKHR*>( this );
  40441. }
  40442. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  40443. auto operator<=>( DisplayPropertiesKHR const& ) const = default;
  40444. #else
  40445. bool operator==( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40446. {
  40447. return ( display == rhs.display )
  40448. && ( displayName == rhs.displayName )
  40449. && ( physicalDimensions == rhs.physicalDimensions )
  40450. && ( physicalResolution == rhs.physicalResolution )
  40451. && ( supportedTransforms == rhs.supportedTransforms )
  40452. && ( planeReorderPossible == rhs.planeReorderPossible )
  40453. && ( persistentContent == rhs.persistentContent );
  40454. }
  40455. bool operator!=( DisplayPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40456. {
  40457. return !operator==( rhs );
  40458. }
  40459. #endif
  40460. public:
  40461. VULKAN_HPP_NAMESPACE::DisplayKHR display = {};
  40462. const char* displayName = {};
  40463. VULKAN_HPP_NAMESPACE::Extent2D physicalDimensions = {};
  40464. VULKAN_HPP_NAMESPACE::Extent2D physicalResolution = {};
  40465. VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
  40466. VULKAN_HPP_NAMESPACE::Bool32 planeReorderPossible = {};
  40467. VULKAN_HPP_NAMESPACE::Bool32 persistentContent = {};
  40468. };
  40469. static_assert( sizeof( DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
  40470. static_assert( std::is_standard_layout<DisplayPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
  40471. struct DisplayProperties2KHR
  40472. {
  40473. static const bool allowDuplicate = false;
  40474. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayProperties2KHR;
  40475. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40476. VULKAN_HPP_CONSTEXPR DisplayProperties2KHR(VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties_ = {}) VULKAN_HPP_NOEXCEPT
  40477. : displayProperties( displayProperties_ )
  40478. {}
  40479. VULKAN_HPP_CONSTEXPR DisplayProperties2KHR( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40480. DisplayProperties2KHR( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  40481. : DisplayProperties2KHR( *reinterpret_cast<DisplayProperties2KHR const *>( &rhs ) )
  40482. {}
  40483. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40484. VULKAN_HPP_CONSTEXPR_14 DisplayProperties2KHR & operator=( DisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40485. DisplayProperties2KHR & operator=( VkDisplayProperties2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  40486. {
  40487. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR const *>( &rhs );
  40488. return *this;
  40489. }
  40490. operator VkDisplayProperties2KHR const&() const VULKAN_HPP_NOEXCEPT
  40491. {
  40492. return *reinterpret_cast<const VkDisplayProperties2KHR*>( this );
  40493. }
  40494. operator VkDisplayProperties2KHR &() VULKAN_HPP_NOEXCEPT
  40495. {
  40496. return *reinterpret_cast<VkDisplayProperties2KHR*>( this );
  40497. }
  40498. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  40499. auto operator<=>( DisplayProperties2KHR const& ) const = default;
  40500. #else
  40501. bool operator==( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40502. {
  40503. return ( sType == rhs.sType )
  40504. && ( pNext == rhs.pNext )
  40505. && ( displayProperties == rhs.displayProperties );
  40506. }
  40507. bool operator!=( DisplayProperties2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  40508. {
  40509. return !operator==( rhs );
  40510. }
  40511. #endif
  40512. public:
  40513. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayProperties2KHR;
  40514. void* pNext = {};
  40515. VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR displayProperties = {};
  40516. };
  40517. static_assert( sizeof( DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ), "struct and wrapper have different size!" );
  40518. static_assert( std::is_standard_layout<DisplayProperties2KHR>::value, "struct wrapper is not a standard layout!" );
  40519. template <>
  40520. struct CppType<StructureType, StructureType::eDisplayProperties2KHR>
  40521. {
  40522. using Type = DisplayProperties2KHR;
  40523. };
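// Usage sketch (illustrative): reading basic display information; displayName may be NULL, so it
// is checked before use. Assumes PhysicalDevice::getDisplayPropertiesKHR (declared elsewhere in
// this header) is available:
//
//   for ( vk::DisplayPropertiesKHR const & d : physicalDevice.getDisplayPropertiesKHR() )
//   {
//     const char * name = d.displayName ? d.displayName : "<unnamed display>";
//     // d.physicalResolution.width x d.physicalResolution.height, d.supportedTransforms, ...
//   }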
  40524. struct PhysicalDeviceExternalBufferInfo
  40525. {
  40526. static const bool allowDuplicate = false;
  40527. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalBufferInfo;
  40528. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40529. VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo(VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
  40530. : flags( flags_ ), usage( usage_ ), handleType( handleType_ )
  40531. {}
  40532. VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalBufferInfo( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40533. PhysicalDeviceExternalBufferInfo( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  40534. : PhysicalDeviceExternalBufferInfo( *reinterpret_cast<PhysicalDeviceExternalBufferInfo const *>( &rhs ) )
  40535. {}
  40536. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40537. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalBufferInfo & operator=( PhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40538. PhysicalDeviceExternalBufferInfo & operator=( VkPhysicalDeviceExternalBufferInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  40539. {
  40540. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo const *>( &rhs );
  40541. return *this;
  40542. }
  40543. PhysicalDeviceExternalBufferInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  40544. {
  40545. pNext = pNext_;
  40546. return *this;
  40547. }
  40548. PhysicalDeviceExternalBufferInfo & setFlags( VULKAN_HPP_NAMESPACE::BufferCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  40549. {
  40550. flags = flags_;
  40551. return *this;
  40552. }
  40553. PhysicalDeviceExternalBufferInfo & setUsage( VULKAN_HPP_NAMESPACE::BufferUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
  40554. {
  40555. usage = usage_;
  40556. return *this;
  40557. }
  40558. PhysicalDeviceExternalBufferInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
  40559. {
  40560. handleType = handleType_;
  40561. return *this;
  40562. }
  40563. operator VkPhysicalDeviceExternalBufferInfo const&() const VULKAN_HPP_NOEXCEPT
  40564. {
  40565. return *reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo*>( this );
  40566. }
  40567. operator VkPhysicalDeviceExternalBufferInfo &() VULKAN_HPP_NOEXCEPT
  40568. {
  40569. return *reinterpret_cast<VkPhysicalDeviceExternalBufferInfo*>( this );
  40570. }
  40571. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  40572. auto operator<=>( PhysicalDeviceExternalBufferInfo const& ) const = default;
  40573. #else
  40574. bool operator==( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  40575. {
  40576. return ( sType == rhs.sType )
  40577. && ( pNext == rhs.pNext )
  40578. && ( flags == rhs.flags )
  40579. && ( usage == rhs.usage )
  40580. && ( handleType == rhs.handleType );
  40581. }
  40582. bool operator!=( PhysicalDeviceExternalBufferInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  40583. {
  40584. return !operator==( rhs );
  40585. }
  40586. #endif
  40587. public:
  40588. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalBufferInfo;
  40589. const void* pNext = {};
  40590. VULKAN_HPP_NAMESPACE::BufferCreateFlags flags = {};
  40591. VULKAN_HPP_NAMESPACE::BufferUsageFlags usage = {};
  40592. VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
  40593. };
  40594. static_assert( sizeof( PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ), "struct and wrapper have different size!" );
  40595. static_assert( std::is_standard_layout<PhysicalDeviceExternalBufferInfo>::value, "struct wrapper is not a standard layout!" );
  40596. template <>
  40597. struct CppType<StructureType, StructureType::ePhysicalDeviceExternalBufferInfo>
  40598. {
  40599. using Type = PhysicalDeviceExternalBufferInfo;
  40600. };
  40601. using PhysicalDeviceExternalBufferInfoKHR = PhysicalDeviceExternalBufferInfo;
  40602. struct ExternalMemoryProperties
  40603. {
  40604. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40605. VULKAN_HPP_CONSTEXPR ExternalMemoryProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes_ = {}) VULKAN_HPP_NOEXCEPT
  40606. : externalMemoryFeatures( externalMemoryFeatures_ ), exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ )
  40607. {}
  40608. VULKAN_HPP_CONSTEXPR ExternalMemoryProperties( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40609. ExternalMemoryProperties( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  40610. : ExternalMemoryProperties( *reinterpret_cast<ExternalMemoryProperties const *>( &rhs ) )
  40611. {}
  40612. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40613. VULKAN_HPP_CONSTEXPR_14 ExternalMemoryProperties & operator=( ExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40614. ExternalMemoryProperties & operator=( VkExternalMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  40615. {
  40616. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties const *>( &rhs );
  40617. return *this;
  40618. }
  40619. operator VkExternalMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
  40620. {
  40621. return *reinterpret_cast<const VkExternalMemoryProperties*>( this );
  40622. }
  40623. operator VkExternalMemoryProperties &() VULKAN_HPP_NOEXCEPT
  40624. {
  40625. return *reinterpret_cast<VkExternalMemoryProperties*>( this );
  40626. }
  40627. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  40628. auto operator<=>( ExternalMemoryProperties const& ) const = default;
  40629. #else
  40630. bool operator==( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  40631. {
  40632. return ( externalMemoryFeatures == rhs.externalMemoryFeatures )
  40633. && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
  40634. && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
  40635. }
  40636. bool operator!=( ExternalMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  40637. {
  40638. return !operator==( rhs );
  40639. }
  40640. #endif
  40641. public:
  40642. VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags externalMemoryFeatures = {};
  40643. VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags exportFromImportedHandleTypes = {};
  40644. VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags compatibleHandleTypes = {};
  40645. };
  40646. static_assert( sizeof( ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ), "struct and wrapper have different size!" );
  40647. static_assert( std::is_standard_layout<ExternalMemoryProperties>::value, "struct wrapper is not a standard layout!" );
  40648. using ExternalMemoryPropertiesKHR = ExternalMemoryProperties;
  40649. struct ExternalBufferProperties
  40650. {
  40651. static const bool allowDuplicate = false;
  40652. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalBufferProperties;
  40653. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40654. VULKAN_HPP_CONSTEXPR ExternalBufferProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}) VULKAN_HPP_NOEXCEPT
  40655. : externalMemoryProperties( externalMemoryProperties_ )
  40656. {}
  40657. VULKAN_HPP_CONSTEXPR ExternalBufferProperties( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40658. ExternalBufferProperties( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  40659. : ExternalBufferProperties( *reinterpret_cast<ExternalBufferProperties const *>( &rhs ) )
  40660. {}
  40661. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40662. VULKAN_HPP_CONSTEXPR_14 ExternalBufferProperties & operator=( ExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40663. ExternalBufferProperties & operator=( VkExternalBufferProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  40664. {
  40665. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalBufferProperties const *>( &rhs );
  40666. return *this;
  40667. }
  40668. operator VkExternalBufferProperties const&() const VULKAN_HPP_NOEXCEPT
  40669. {
  40670. return *reinterpret_cast<const VkExternalBufferProperties*>( this );
  40671. }
  40672. operator VkExternalBufferProperties &() VULKAN_HPP_NOEXCEPT
  40673. {
  40674. return *reinterpret_cast<VkExternalBufferProperties*>( this );
  40675. }
  40676. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  40677. auto operator<=>( ExternalBufferProperties const& ) const = default;
  40678. #else
  40679. bool operator==( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  40680. {
  40681. return ( sType == rhs.sType )
  40682. && ( pNext == rhs.pNext )
  40683. && ( externalMemoryProperties == rhs.externalMemoryProperties );
  40684. }
  40685. bool operator!=( ExternalBufferProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  40686. {
  40687. return !operator==( rhs );
  40688. }
  40689. #endif
  40690. public:
  40691. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalBufferProperties;
  40692. void* pNext = {};
  40693. VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
  40694. };
  40695. static_assert( sizeof( ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ), "struct and wrapper have different size!" );
  40696. static_assert( std::is_standard_layout<ExternalBufferProperties>::value, "struct wrapper is not a standard layout!" );
  40697. template <>
  40698. struct CppType<StructureType, StructureType::eExternalBufferProperties>
  40699. {
  40700. using Type = ExternalBufferProperties;
  40701. };
  40702. using ExternalBufferPropertiesKHR = ExternalBufferProperties;
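// Illustrative usage sketch, not part of the generated header: querying external
// buffer capabilities with the two structs above. Assumes the default "vk" namespace
// alias, an already-enumerated vk::PhysicalDevice named "physicalDevice", and the
// enhanced-mode PhysicalDevice::getExternalBufferProperties wrapper declared later
// in this header.
//
//   vk::PhysicalDeviceExternalBufferInfo externalBufferInfo;
//   externalBufferInfo.setUsage( vk::BufferUsageFlagBits::eTransferSrc )
//                     .setHandleType( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
//   vk::ExternalBufferProperties externalBufferProperties =
//     physicalDevice.getExternalBufferProperties( externalBufferInfo );
//   bool exportable = static_cast<bool>(
//     externalBufferProperties.externalMemoryProperties.externalMemoryFeatures
//     & vk::ExternalMemoryFeatureFlagBits::eExportable );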
  40703. struct PhysicalDeviceExternalFenceInfo
  40704. {
  40705. static const bool allowDuplicate = false;
  40706. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalFenceInfo;
  40707. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40708. VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
  40709. : handleType( handleType_ )
  40710. {}
  40711. VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalFenceInfo( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40712. PhysicalDeviceExternalFenceInfo( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  40713. : PhysicalDeviceExternalFenceInfo( *reinterpret_cast<PhysicalDeviceExternalFenceInfo const *>( &rhs ) )
  40714. {}
  40715. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40716. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalFenceInfo & operator=( PhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40717. PhysicalDeviceExternalFenceInfo & operator=( VkPhysicalDeviceExternalFenceInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  40718. {
  40719. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo const *>( &rhs );
  40720. return *this;
  40721. }
  40722. PhysicalDeviceExternalFenceInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  40723. {
  40724. pNext = pNext_;
  40725. return *this;
  40726. }
  40727. PhysicalDeviceExternalFenceInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
  40728. {
  40729. handleType = handleType_;
  40730. return *this;
  40731. }
  40732. operator VkPhysicalDeviceExternalFenceInfo const&() const VULKAN_HPP_NOEXCEPT
  40733. {
  40734. return *reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo*>( this );
  40735. }
  40736. operator VkPhysicalDeviceExternalFenceInfo &() VULKAN_HPP_NOEXCEPT
  40737. {
  40738. return *reinterpret_cast<VkPhysicalDeviceExternalFenceInfo*>( this );
  40739. }
  40740. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  40741. auto operator<=>( PhysicalDeviceExternalFenceInfo const& ) const = default;
  40742. #else
  40743. bool operator==( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  40744. {
  40745. return ( sType == rhs.sType )
  40746. && ( pNext == rhs.pNext )
  40747. && ( handleType == rhs.handleType );
  40748. }
  40749. bool operator!=( PhysicalDeviceExternalFenceInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  40750. {
  40751. return !operator==( rhs );
  40752. }
  40753. #endif
  40754. public:
  40755. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalFenceInfo;
  40756. const void* pNext = {};
  40757. VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits::eOpaqueFd;
  40758. };
  40759. static_assert( sizeof( PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ), "struct and wrapper have different size!" );
  40760. static_assert( std::is_standard_layout<PhysicalDeviceExternalFenceInfo>::value, "struct wrapper is not a standard layout!" );
  40761. template <>
  40762. struct CppType<StructureType, StructureType::ePhysicalDeviceExternalFenceInfo>
  40763. {
  40764. using Type = PhysicalDeviceExternalFenceInfo;
  40765. };
  40766. using PhysicalDeviceExternalFenceInfoKHR = PhysicalDeviceExternalFenceInfo;
  40767. struct ExternalFenceProperties
  40768. {
  40769. static const bool allowDuplicate = false;
  40770. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFenceProperties;
  40771. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40772. VULKAN_HPP_CONSTEXPR ExternalFenceProperties(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures_ = {}) VULKAN_HPP_NOEXCEPT
  40773. : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ ), externalFenceFeatures( externalFenceFeatures_ )
  40774. {}
  40775. VULKAN_HPP_CONSTEXPR ExternalFenceProperties( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40776. ExternalFenceProperties( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  40777. : ExternalFenceProperties( *reinterpret_cast<ExternalFenceProperties const *>( &rhs ) )
  40778. {}
  40779. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40780. VULKAN_HPP_CONSTEXPR_14 ExternalFenceProperties & operator=( ExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40781. ExternalFenceProperties & operator=( VkExternalFenceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  40782. {
  40783. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFenceProperties const *>( &rhs );
  40784. return *this;
  40785. }
  40786. operator VkExternalFenceProperties const&() const VULKAN_HPP_NOEXCEPT
  40787. {
  40788. return *reinterpret_cast<const VkExternalFenceProperties*>( this );
  40789. }
  40790. operator VkExternalFenceProperties &() VULKAN_HPP_NOEXCEPT
  40791. {
  40792. return *reinterpret_cast<VkExternalFenceProperties*>( this );
  40793. }
  40794. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  40795. auto operator<=>( ExternalFenceProperties const& ) const = default;
  40796. #else
  40797. bool operator==( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  40798. {
  40799. return ( sType == rhs.sType )
  40800. && ( pNext == rhs.pNext )
  40801. && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
  40802. && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
  40803. && ( externalFenceFeatures == rhs.externalFenceFeatures );
  40804. }
  40805. bool operator!=( ExternalFenceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  40806. {
  40807. return !operator==( rhs );
  40808. }
  40809. #endif
  40810. public:
  40811. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFenceProperties;
  40812. void* pNext = {};
  40813. VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags exportFromImportedHandleTypes = {};
  40814. VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags compatibleHandleTypes = {};
  40815. VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags externalFenceFeatures = {};
  40816. };
  40817. static_assert( sizeof( ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ), "struct and wrapper have different size!" );
  40818. static_assert( std::is_standard_layout<ExternalFenceProperties>::value, "struct wrapper is not a standard layout!" );
  40819. template <>
  40820. struct CppType<StructureType, StructureType::eExternalFenceProperties>
  40821. {
  40822. using Type = ExternalFenceProperties;
  40823. };
  40824. using ExternalFencePropertiesKHR = ExternalFenceProperties;
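// Illustrative usage sketch, not part of the generated header: checking whether a
// fence payload can be exported through a POSIX file descriptor. Assumes the same
// hypothetical "physicalDevice" handle as the other sketches and the
// PhysicalDevice::getExternalFenceProperties wrapper declared later in this header.
//
//   vk::PhysicalDeviceExternalFenceInfo externalFenceInfo;
//   externalFenceInfo.setHandleType( vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd );
//   vk::ExternalFenceProperties externalFenceProperties =
//     physicalDevice.getExternalFenceProperties( externalFenceInfo );
//   bool fenceExportable = static_cast<bool>(
//     externalFenceProperties.externalFenceFeatures
//     & vk::ExternalFenceFeatureFlagBits::eExportable );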
  40825. struct ImageFormatProperties
  40826. {
  40827. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40828. VULKAN_HPP_CONSTEXPR ImageFormatProperties(VULKAN_HPP_NAMESPACE::Extent3D maxExtent_ = {}, uint32_t maxMipLevels_ = {}, uint32_t maxArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize_ = {}) VULKAN_HPP_NOEXCEPT
  40829. : maxExtent( maxExtent_ ), maxMipLevels( maxMipLevels_ ), maxArrayLayers( maxArrayLayers_ ), sampleCounts( sampleCounts_ ), maxResourceSize( maxResourceSize_ )
  40830. {}
  40831. VULKAN_HPP_CONSTEXPR ImageFormatProperties( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40832. ImageFormatProperties( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  40833. : ImageFormatProperties( *reinterpret_cast<ImageFormatProperties const *>( &rhs ) )
  40834. {}
  40835. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40836. VULKAN_HPP_CONSTEXPR_14 ImageFormatProperties & operator=( ImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40837. ImageFormatProperties & operator=( VkImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  40838. {
  40839. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties const *>( &rhs );
  40840. return *this;
  40841. }
  40842. operator VkImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
  40843. {
  40844. return *reinterpret_cast<const VkImageFormatProperties*>( this );
  40845. }
  40846. operator VkImageFormatProperties &() VULKAN_HPP_NOEXCEPT
  40847. {
  40848. return *reinterpret_cast<VkImageFormatProperties*>( this );
  40849. }
  40850. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  40851. auto operator<=>( ImageFormatProperties const& ) const = default;
  40852. #else
  40853. bool operator==( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  40854. {
  40855. return ( maxExtent == rhs.maxExtent )
  40856. && ( maxMipLevels == rhs.maxMipLevels )
  40857. && ( maxArrayLayers == rhs.maxArrayLayers )
  40858. && ( sampleCounts == rhs.sampleCounts )
  40859. && ( maxResourceSize == rhs.maxResourceSize );
  40860. }
  40861. bool operator!=( ImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  40862. {
  40863. return !operator==( rhs );
  40864. }
  40865. #endif
  40866. public:
  40867. VULKAN_HPP_NAMESPACE::Extent3D maxExtent = {};
  40868. uint32_t maxMipLevels = {};
  40869. uint32_t maxArrayLayers = {};
  40870. VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
  40871. VULKAN_HPP_NAMESPACE::DeviceSize maxResourceSize = {};
  40872. };
  40873. static_assert( sizeof( ImageFormatProperties ) == sizeof( VkImageFormatProperties ), "struct and wrapper have different size!" );
  40874. static_assert( std::is_standard_layout<ImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
  40875. struct ExternalImageFormatPropertiesNV
  40876. {
  40877. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40878. VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV(VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes_ = {}) VULKAN_HPP_NOEXCEPT
  40879. : imageFormatProperties( imageFormatProperties_ ), externalMemoryFeatures( externalMemoryFeatures_ ), exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ )
  40880. {}
  40881. VULKAN_HPP_CONSTEXPR ExternalImageFormatPropertiesNV( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40882. ExternalImageFormatPropertiesNV( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  40883. : ExternalImageFormatPropertiesNV( *reinterpret_cast<ExternalImageFormatPropertiesNV const *>( &rhs ) )
  40884. {}
  40885. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40886. VULKAN_HPP_CONSTEXPR_14 ExternalImageFormatPropertiesNV & operator=( ExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40887. ExternalImageFormatPropertiesNV & operator=( VkExternalImageFormatPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  40888. {
  40889. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV const *>( &rhs );
  40890. return *this;
  40891. }
  40892. operator VkExternalImageFormatPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
  40893. {
  40894. return *reinterpret_cast<const VkExternalImageFormatPropertiesNV*>( this );
  40895. }
  40896. operator VkExternalImageFormatPropertiesNV &() VULKAN_HPP_NOEXCEPT
  40897. {
  40898. return *reinterpret_cast<VkExternalImageFormatPropertiesNV*>( this );
  40899. }
  40900. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  40901. auto operator<=>( ExternalImageFormatPropertiesNV const& ) const = default;
  40902. #else
  40903. bool operator==( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  40904. {
  40905. return ( imageFormatProperties == rhs.imageFormatProperties )
  40906. && ( externalMemoryFeatures == rhs.externalMemoryFeatures )
  40907. && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
  40908. && ( compatibleHandleTypes == rhs.compatibleHandleTypes );
  40909. }
  40910. bool operator!=( ExternalImageFormatPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  40911. {
  40912. return !operator==( rhs );
  40913. }
  40914. #endif
  40915. public:
  40916. VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
  40917. VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV externalMemoryFeatures = {};
  40918. VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes = {};
  40919. VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV compatibleHandleTypes = {};
  40920. };
  40921. static_assert( sizeof( ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ), "struct and wrapper have different size!" );
  40922. static_assert( std::is_standard_layout<ExternalImageFormatPropertiesNV>::value, "struct wrapper is not a standard layout!" );
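// Illustrative usage sketch, not part of the generated header: retrieving the NV
// external-memory image capabilities described by the struct above. Assumes the
// VK_NV_external_memory_capabilities instance extension is enabled, a hypothetical
// "physicalDevice" handle, and default exception-based error handling (the wrapper
// throws a vk::SystemError on failure).
//
//   vk::ExternalImageFormatPropertiesNV externalImageFormatPropertiesNV =
//     physicalDevice.getExternalImageFormatPropertiesNV(
//       vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D, vk::ImageTiling::eOptimal,
//       vk::ImageUsageFlagBits::eSampled, {},
//       vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 );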
  40923. struct PhysicalDeviceExternalSemaphoreInfo
  40924. {
  40925. static const bool allowDuplicate = false;
  40926. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
  40927. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40928. VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
  40929. : handleType( handleType_ )
  40930. {}
  40931. VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalSemaphoreInfo( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40932. PhysicalDeviceExternalSemaphoreInfo( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  40933. : PhysicalDeviceExternalSemaphoreInfo( *reinterpret_cast<PhysicalDeviceExternalSemaphoreInfo const *>( &rhs ) )
  40934. {}
  40935. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40936. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalSemaphoreInfo & operator=( PhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40937. PhysicalDeviceExternalSemaphoreInfo & operator=( VkPhysicalDeviceExternalSemaphoreInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  40938. {
  40939. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo const *>( &rhs );
  40940. return *this;
  40941. }
  40942. PhysicalDeviceExternalSemaphoreInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  40943. {
  40944. pNext = pNext_;
  40945. return *this;
  40946. }
  40947. PhysicalDeviceExternalSemaphoreInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
  40948. {
  40949. handleType = handleType_;
  40950. return *this;
  40951. }
  40952. operator VkPhysicalDeviceExternalSemaphoreInfo const&() const VULKAN_HPP_NOEXCEPT
  40953. {
  40954. return *reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo*>( this );
  40955. }
  40956. operator VkPhysicalDeviceExternalSemaphoreInfo &() VULKAN_HPP_NOEXCEPT
  40957. {
  40958. return *reinterpret_cast<VkPhysicalDeviceExternalSemaphoreInfo*>( this );
  40959. }
  40960. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  40961. auto operator<=>( PhysicalDeviceExternalSemaphoreInfo const& ) const = default;
  40962. #else
  40963. bool operator==( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  40964. {
  40965. return ( sType == rhs.sType )
  40966. && ( pNext == rhs.pNext )
  40967. && ( handleType == rhs.handleType );
  40968. }
  40969. bool operator!=( PhysicalDeviceExternalSemaphoreInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  40970. {
  40971. return !operator==( rhs );
  40972. }
  40973. #endif
  40974. public:
  40975. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalSemaphoreInfo;
  40976. const void* pNext = {};
  40977. VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd;
  40978. };
  40979. static_assert( sizeof( PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ), "struct and wrapper have different size!" );
  40980. static_assert( std::is_standard_layout<PhysicalDeviceExternalSemaphoreInfo>::value, "struct wrapper is not a standard layout!" );
  40981. template <>
  40982. struct CppType<StructureType, StructureType::ePhysicalDeviceExternalSemaphoreInfo>
  40983. {
  40984. using Type = PhysicalDeviceExternalSemaphoreInfo;
  40985. };
  40986. using PhysicalDeviceExternalSemaphoreInfoKHR = PhysicalDeviceExternalSemaphoreInfo;
  40987. struct ExternalSemaphoreProperties
  40988. {
  40989. static const bool allowDuplicate = false;
  40990. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalSemaphoreProperties;
  40991. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  40992. VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes_ = {}, VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures_ = {}) VULKAN_HPP_NOEXCEPT
  40993. : exportFromImportedHandleTypes( exportFromImportedHandleTypes_ ), compatibleHandleTypes( compatibleHandleTypes_ ), externalSemaphoreFeatures( externalSemaphoreFeatures_ )
  40994. {}
  40995. VULKAN_HPP_CONSTEXPR ExternalSemaphoreProperties( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  40996. ExternalSemaphoreProperties( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  40997. : ExternalSemaphoreProperties( *reinterpret_cast<ExternalSemaphoreProperties const *>( &rhs ) )
  40998. {}
  40999. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41000. VULKAN_HPP_CONSTEXPR_14 ExternalSemaphoreProperties & operator=( ExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41001. ExternalSemaphoreProperties & operator=( VkExternalSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  41002. {
  41003. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties const *>( &rhs );
  41004. return *this;
  41005. }
  41006. operator VkExternalSemaphoreProperties const&() const VULKAN_HPP_NOEXCEPT
  41007. {
  41008. return *reinterpret_cast<const VkExternalSemaphoreProperties*>( this );
  41009. }
  41010. operator VkExternalSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
  41011. {
  41012. return *reinterpret_cast<VkExternalSemaphoreProperties*>( this );
  41013. }
  41014. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  41015. auto operator<=>( ExternalSemaphoreProperties const& ) const = default;
  41016. #else
  41017. bool operator==( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  41018. {
  41019. return ( sType == rhs.sType )
  41020. && ( pNext == rhs.pNext )
  41021. && ( exportFromImportedHandleTypes == rhs.exportFromImportedHandleTypes )
  41022. && ( compatibleHandleTypes == rhs.compatibleHandleTypes )
  41023. && ( externalSemaphoreFeatures == rhs.externalSemaphoreFeatures );
  41024. }
  41025. bool operator!=( ExternalSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  41026. {
  41027. return !operator==( rhs );
  41028. }
  41029. #endif
  41030. public:
  41031. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalSemaphoreProperties;
  41032. void* pNext = {};
  41033. VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes = {};
  41034. VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags compatibleHandleTypes = {};
  41035. VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags externalSemaphoreFeatures = {};
  41036. };
  41037. static_assert( sizeof( ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ), "struct and wrapper have different size!" );
  41038. static_assert( std::is_standard_layout<ExternalSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
  41039. template <>
  41040. struct CppType<StructureType, StructureType::eExternalSemaphoreProperties>
  41041. {
  41042. using Type = ExternalSemaphoreProperties;
  41043. };
  41044. using ExternalSemaphorePropertiesKHR = ExternalSemaphoreProperties;
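// Illustrative usage sketch, not part of the generated header: the semaphore
// counterpart of the buffer and fence capability queries sketched above. Assumes the
// same hypothetical "physicalDevice" handle and the
// PhysicalDevice::getExternalSemaphoreProperties wrapper declared later in this header.
//
//   vk::PhysicalDeviceExternalSemaphoreInfo externalSemaphoreInfo;
//   externalSemaphoreInfo.setHandleType( vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd );
//   vk::ExternalSemaphoreProperties externalSemaphoreProperties =
//     physicalDevice.getExternalSemaphoreProperties( externalSemaphoreInfo );
//   bool semaphoreExportable = static_cast<bool>(
//     externalSemaphoreProperties.externalSemaphoreFeatures
//     & vk::ExternalSemaphoreFeatureFlagBits::eExportable );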
  41045. struct PhysicalDeviceFeatures2
  41046. {
  41047. static const bool allowDuplicate = false;
  41048. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFeatures2;
  41049. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41050. VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2(VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features_ = {}) VULKAN_HPP_NOEXCEPT
  41051. : features( features_ )
  41052. {}
  41053. VULKAN_HPP_CONSTEXPR PhysicalDeviceFeatures2( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41054. PhysicalDeviceFeatures2( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
  41055. : PhysicalDeviceFeatures2( *reinterpret_cast<PhysicalDeviceFeatures2 const *>( &rhs ) )
  41056. {}
  41057. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41058. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFeatures2 & operator=( PhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41059. PhysicalDeviceFeatures2 & operator=( VkPhysicalDeviceFeatures2 const & rhs ) VULKAN_HPP_NOEXCEPT
  41060. {
  41061. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 const *>( &rhs );
  41062. return *this;
  41063. }
  41064. PhysicalDeviceFeatures2 & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  41065. {
  41066. pNext = pNext_;
  41067. return *this;
  41068. }
  41069. PhysicalDeviceFeatures2 & setFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures const & features_ ) VULKAN_HPP_NOEXCEPT
  41070. {
  41071. features = features_;
  41072. return *this;
  41073. }
  41074. operator VkPhysicalDeviceFeatures2 const&() const VULKAN_HPP_NOEXCEPT
  41075. {
  41076. return *reinterpret_cast<const VkPhysicalDeviceFeatures2*>( this );
  41077. }
  41078. operator VkPhysicalDeviceFeatures2 &() VULKAN_HPP_NOEXCEPT
  41079. {
  41080. return *reinterpret_cast<VkPhysicalDeviceFeatures2*>( this );
  41081. }
  41082. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  41083. auto operator<=>( PhysicalDeviceFeatures2 const& ) const = default;
  41084. #else
  41085. bool operator==( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  41086. {
  41087. return ( sType == rhs.sType )
  41088. && ( pNext == rhs.pNext )
  41089. && ( features == rhs.features );
  41090. }
  41091. bool operator!=( PhysicalDeviceFeatures2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  41092. {
  41093. return !operator==( rhs );
  41094. }
  41095. #endif
  41096. public:
  41097. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFeatures2;
  41098. void* pNext = {};
  41099. VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features = {};
  41100. };
  41101. static_assert( sizeof( PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ), "struct and wrapper have different size!" );
  41102. static_assert( std::is_standard_layout<PhysicalDeviceFeatures2>::value, "struct wrapper is not a standard layout!" );
  41103. template <>
  41104. struct CppType<StructureType, StructureType::ePhysicalDeviceFeatures2>
  41105. {
  41106. using Type = PhysicalDeviceFeatures2;
  41107. };
  41108. using PhysicalDeviceFeatures2KHR = PhysicalDeviceFeatures2;
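// Illustrative usage sketch, not part of the generated header: reading core feature
// support through the extensible PhysicalDeviceFeatures2 wrapper. Assumes a
// hypothetical "physicalDevice" handle; the zero-argument getFeatures2() overload is
// declared later in this header, and a StructureChain overload exists for chaining
// extension feature structs through pNext.
//
//   vk::PhysicalDeviceFeatures2 features2 = physicalDevice.getFeatures2();
//   bool samplerAnisotropy = static_cast<bool>( features2.features.samplerAnisotropy );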
  41109. struct FormatProperties
  41110. {
  41111. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41112. VULKAN_HPP_CONSTEXPR FormatProperties(VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures_ = {}) VULKAN_HPP_NOEXCEPT
  41113. : linearTilingFeatures( linearTilingFeatures_ ), optimalTilingFeatures( optimalTilingFeatures_ ), bufferFeatures( bufferFeatures_ )
  41114. {}
  41115. VULKAN_HPP_CONSTEXPR FormatProperties( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41116. FormatProperties( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  41117. : FormatProperties( *reinterpret_cast<FormatProperties const *>( &rhs ) )
  41118. {}
  41119. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41120. VULKAN_HPP_CONSTEXPR_14 FormatProperties & operator=( FormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41121. FormatProperties & operator=( VkFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  41122. {
  41123. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties const *>( &rhs );
  41124. return *this;
  41125. }
  41126. operator VkFormatProperties const&() const VULKAN_HPP_NOEXCEPT
  41127. {
  41128. return *reinterpret_cast<const VkFormatProperties*>( this );
  41129. }
  41130. operator VkFormatProperties &() VULKAN_HPP_NOEXCEPT
  41131. {
  41132. return *reinterpret_cast<VkFormatProperties*>( this );
  41133. }
  41134. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  41135. auto operator<=>( FormatProperties const& ) const = default;
  41136. #else
  41137. bool operator==( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  41138. {
  41139. return ( linearTilingFeatures == rhs.linearTilingFeatures )
  41140. && ( optimalTilingFeatures == rhs.optimalTilingFeatures )
  41141. && ( bufferFeatures == rhs.bufferFeatures );
  41142. }
  41143. bool operator!=( FormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  41144. {
  41145. return !operator==( rhs );
  41146. }
  41147. #endif
  41148. public:
  41149. VULKAN_HPP_NAMESPACE::FormatFeatureFlags linearTilingFeatures = {};
  41150. VULKAN_HPP_NAMESPACE::FormatFeatureFlags optimalTilingFeatures = {};
  41151. VULKAN_HPP_NAMESPACE::FormatFeatureFlags bufferFeatures = {};
  41152. };
  41153. static_assert( sizeof( FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" );
  41154. static_assert( std::is_standard_layout<FormatProperties>::value, "struct wrapper is not a standard layout!" );
  41155. struct FormatProperties2
  41156. {
  41157. static const bool allowDuplicate = false;
  41158. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFormatProperties2;
  41159. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41160. VULKAN_HPP_CONSTEXPR FormatProperties2(VULKAN_HPP_NAMESPACE::FormatProperties formatProperties_ = {}) VULKAN_HPP_NOEXCEPT
  41161. : formatProperties( formatProperties_ )
  41162. {}
  41163. VULKAN_HPP_CONSTEXPR FormatProperties2( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41164. FormatProperties2( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
  41165. : FormatProperties2( *reinterpret_cast<FormatProperties2 const *>( &rhs ) )
  41166. {}
  41167. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41168. VULKAN_HPP_CONSTEXPR_14 FormatProperties2 & operator=( FormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41169. FormatProperties2 & operator=( VkFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
  41170. {
  41171. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FormatProperties2 const *>( &rhs );
  41172. return *this;
  41173. }
  41174. operator VkFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT
  41175. {
  41176. return *reinterpret_cast<const VkFormatProperties2*>( this );
  41177. }
  41178. operator VkFormatProperties2 &() VULKAN_HPP_NOEXCEPT
  41179. {
  41180. return *reinterpret_cast<VkFormatProperties2*>( this );
  41181. }
  41182. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  41183. auto operator<=>( FormatProperties2 const& ) const = default;
  41184. #else
  41185. bool operator==( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  41186. {
  41187. return ( sType == rhs.sType )
  41188. && ( pNext == rhs.pNext )
  41189. && ( formatProperties == rhs.formatProperties );
  41190. }
  41191. bool operator!=( FormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  41192. {
  41193. return !operator==( rhs );
  41194. }
  41195. #endif
  41196. public:
  41197. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFormatProperties2;
  41198. void* pNext = {};
  41199. VULKAN_HPP_NAMESPACE::FormatProperties formatProperties = {};
  41200. };
  41201. static_assert( sizeof( FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" );
  41202. static_assert( std::is_standard_layout<FormatProperties2>::value, "struct wrapper is not a standard layout!" );
  41203. template <>
  41204. struct CppType<StructureType, StructureType::eFormatProperties2>
  41205. {
  41206. using Type = FormatProperties2;
  41207. };
  41208. using FormatProperties2KHR = FormatProperties2;
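// Illustrative usage sketch, not part of the generated header: checking optimal-tiling
// capabilities of a format. Assumes a hypothetical "physicalDevice" handle and the
// PhysicalDevice::getFormatProperties2 wrapper declared later in this header.
//
//   vk::FormatProperties2 formatProperties2 =
//     physicalDevice.getFormatProperties2( vk::Format::eR8G8B8A8Unorm );
//   bool linearFiltering = static_cast<bool>(
//     formatProperties2.formatProperties.optimalTilingFeatures
//     & vk::FormatFeatureFlagBits::eSampledImageFilterLinear );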
  41209. struct PhysicalDeviceFragmentShadingRateKHR
  41210. {
  41211. static const bool allowDuplicate = false;
  41212. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateKHR;
  41213. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41214. VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR(VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts_ = {}, VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}) VULKAN_HPP_NOEXCEPT
  41215. : sampleCounts( sampleCounts_ ), fragmentSize( fragmentSize_ )
  41216. {}
  41217. VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateKHR( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41218. PhysicalDeviceFragmentShadingRateKHR( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  41219. : PhysicalDeviceFragmentShadingRateKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRateKHR const *>( &rhs ) )
  41220. {}
  41221. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41222. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateKHR & operator=( PhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41223. PhysicalDeviceFragmentShadingRateKHR & operator=( VkPhysicalDeviceFragmentShadingRateKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  41224. {
  41225. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR const *>( &rhs );
  41226. return *this;
  41227. }
  41228. operator VkPhysicalDeviceFragmentShadingRateKHR const&() const VULKAN_HPP_NOEXCEPT
  41229. {
  41230. return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateKHR*>( this );
  41231. }
  41232. operator VkPhysicalDeviceFragmentShadingRateKHR &() VULKAN_HPP_NOEXCEPT
  41233. {
  41234. return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR*>( this );
  41235. }
  41236. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  41237. auto operator<=>( PhysicalDeviceFragmentShadingRateKHR const& ) const = default;
  41238. #else
  41239. bool operator==( PhysicalDeviceFragmentShadingRateKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  41240. {
  41241. return ( sType == rhs.sType )
  41242. && ( pNext == rhs.pNext )
  41243. && ( sampleCounts == rhs.sampleCounts )
  41244. && ( fragmentSize == rhs.fragmentSize );
  41245. }
  41246. bool operator!=( PhysicalDeviceFragmentShadingRateKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  41247. {
  41248. return !operator==( rhs );
  41249. }
  41250. #endif
  41251. public:
  41252. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateKHR;
  41253. void* pNext = {};
  41254. VULKAN_HPP_NAMESPACE::SampleCountFlags sampleCounts = {};
  41255. VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {};
  41256. };
  41257. static_assert( sizeof( PhysicalDeviceFragmentShadingRateKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRateKHR ), "struct and wrapper have different size!" );
  41258. static_assert( std::is_standard_layout<PhysicalDeviceFragmentShadingRateKHR>::value, "struct wrapper is not a standard layout!" );
  41259. template <>
  41260. struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateKHR>
  41261. {
  41262. using Type = PhysicalDeviceFragmentShadingRateKHR;
  41263. };
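// Illustrative usage sketch, not part of the generated header: enumerating the
// fragment shading rates reported through the struct above. Assumes a device that
// supports VK_KHR_fragment_shading_rate, a hypothetical "physicalDevice" handle, and
// the enhanced-mode getFragmentShadingRatesKHR wrapper (returning a std::vector and
// throwing on failure) declared later in this header.
//
//   std::vector<vk::PhysicalDeviceFragmentShadingRateKHR> shadingRates =
//     physicalDevice.getFragmentShadingRatesKHR();
//   for ( vk::PhysicalDeviceFragmentShadingRateKHR const & rate : shadingRates )
//   {
//     // rate.fragmentSize gives the fragment width/height; rate.sampleCounts lists
//     // the sample counts for which that rate is supported.
//   }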
  41264. struct PhysicalDeviceImageFormatInfo2
  41265. {
  41266. static const bool allowDuplicate = false;
  41267. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageFormatInfo2;
  41268. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41269. VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
  41270. : format( format_ ), type( type_ ), tiling( tiling_ ), usage( usage_ ), flags( flags_ )
  41271. {}
  41272. VULKAN_HPP_CONSTEXPR PhysicalDeviceImageFormatInfo2( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41273. PhysicalDeviceImageFormatInfo2( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
  41274. : PhysicalDeviceImageFormatInfo2( *reinterpret_cast<PhysicalDeviceImageFormatInfo2 const *>( &rhs ) )
  41275. {}
  41276. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41277. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageFormatInfo2 & operator=( PhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41278. PhysicalDeviceImageFormatInfo2 & operator=( VkPhysicalDeviceImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
  41279. {
  41280. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 const *>( &rhs );
  41281. return *this;
  41282. }
  41283. PhysicalDeviceImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  41284. {
  41285. pNext = pNext_;
  41286. return *this;
  41287. }
  41288. PhysicalDeviceImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
  41289. {
  41290. format = format_;
  41291. return *this;
  41292. }
  41293. PhysicalDeviceImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
  41294. {
  41295. type = type_;
  41296. return *this;
  41297. }
  41298. PhysicalDeviceImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
  41299. {
  41300. tiling = tiling_;
  41301. return *this;
  41302. }
  41303. PhysicalDeviceImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
  41304. {
  41305. usage = usage_;
  41306. return *this;
  41307. }
  41308. PhysicalDeviceImageFormatInfo2 & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  41309. {
  41310. flags = flags_;
  41311. return *this;
  41312. }
  41313. operator VkPhysicalDeviceImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT
  41314. {
  41315. return *reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2*>( this );
  41316. }
  41317. operator VkPhysicalDeviceImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
  41318. {
  41319. return *reinterpret_cast<VkPhysicalDeviceImageFormatInfo2*>( this );
  41320. }
  41321. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  41322. auto operator<=>( PhysicalDeviceImageFormatInfo2 const& ) const = default;
  41323. #else
  41324. bool operator==( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  41325. {
  41326. return ( sType == rhs.sType )
  41327. && ( pNext == rhs.pNext )
  41328. && ( format == rhs.format )
  41329. && ( type == rhs.type )
  41330. && ( tiling == rhs.tiling )
  41331. && ( usage == rhs.usage )
  41332. && ( flags == rhs.flags );
  41333. }
  41334. bool operator!=( PhysicalDeviceImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  41335. {
  41336. return !operator==( rhs );
  41337. }
  41338. #endif
  41339. public:
  41340. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageFormatInfo2;
  41341. const void* pNext = {};
  41342. VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  41343. VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
  41344. VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
  41345. VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
  41346. VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
  41347. };
  41348. static_assert( sizeof( PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ), "struct and wrapper have different size!" );
  41349. static_assert( std::is_standard_layout<PhysicalDeviceImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
  41350. template <>
  41351. struct CppType<StructureType, StructureType::ePhysicalDeviceImageFormatInfo2>
  41352. {
  41353. using Type = PhysicalDeviceImageFormatInfo2;
  41354. };
  41355. using PhysicalDeviceImageFormatInfo2KHR = PhysicalDeviceImageFormatInfo2;
  41356. struct ImageFormatProperties2
  41357. {
  41358. static const bool allowDuplicate = false;
  41359. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatProperties2;
  41360. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41361. VULKAN_HPP_CONSTEXPR ImageFormatProperties2(VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties_ = {}) VULKAN_HPP_NOEXCEPT
  41362. : imageFormatProperties( imageFormatProperties_ )
  41363. {}
  41364. VULKAN_HPP_CONSTEXPR ImageFormatProperties2( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41365. ImageFormatProperties2( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
  41366. : ImageFormatProperties2( *reinterpret_cast<ImageFormatProperties2 const *>( &rhs ) )
  41367. {}
  41368. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41369. VULKAN_HPP_CONSTEXPR_14 ImageFormatProperties2 & operator=( ImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41370. ImageFormatProperties2 & operator=( VkImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
  41371. {
  41372. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatProperties2 const *>( &rhs );
  41373. return *this;
  41374. }
  41375. operator VkImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT
  41376. {
  41377. return *reinterpret_cast<const VkImageFormatProperties2*>( this );
  41378. }
  41379. operator VkImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
  41380. {
  41381. return *reinterpret_cast<VkImageFormatProperties2*>( this );
  41382. }
  41383. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  41384. auto operator<=>( ImageFormatProperties2 const& ) const = default;
  41385. #else
  41386. bool operator==( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  41387. {
  41388. return ( sType == rhs.sType )
  41389. && ( pNext == rhs.pNext )
  41390. && ( imageFormatProperties == rhs.imageFormatProperties );
  41391. }
  41392. bool operator!=( ImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  41393. {
  41394. return !operator==( rhs );
  41395. }
  41396. #endif
  41397. public:
  41398. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatProperties2;
  41399. void* pNext = {};
  41400. VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties = {};
  41401. };
  41402. static_assert( sizeof( ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ), "struct and wrapper have different size!" );
  41403. static_assert( std::is_standard_layout<ImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
  41404. template <>
  41405. struct CppType<StructureType, StructureType::eImageFormatProperties2>
  41406. {
  41407. using Type = ImageFormatProperties2;
  41408. };
  41409. using ImageFormatProperties2KHR = ImageFormatProperties2;
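// Illustrative usage sketch, not part of the generated header: asking whether an image
// configuration is supported before creating it. Assumes a hypothetical "physicalDevice"
// handle and exception-based error handling; the getImageFormatProperties2 wrapper
// (declared later in this header) throws, e.g. vk::FormatNotSupportedError, when the
// combination is not supported.
//
//   vk::PhysicalDeviceImageFormatInfo2 imageFormatInfo;
//   imageFormatInfo.setFormat( vk::Format::eR8G8B8A8Unorm )
//                  .setType( vk::ImageType::e2D )
//                  .setTiling( vk::ImageTiling::eOptimal )
//                  .setUsage( vk::ImageUsageFlagBits::eSampled | vk::ImageUsageFlagBits::eTransferDst );
//   vk::ImageFormatProperties2 imageFormatProperties2 =
//     physicalDevice.getImageFormatProperties2( imageFormatInfo );
//   vk::DeviceSize maxResourceSize = imageFormatProperties2.imageFormatProperties.maxResourceSize;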
  41410. struct MemoryType
  41411. {
  41412. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41413. VULKAN_HPP_CONSTEXPR MemoryType(VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags_ = {}, uint32_t heapIndex_ = {}) VULKAN_HPP_NOEXCEPT
  41414. : propertyFlags( propertyFlags_ ), heapIndex( heapIndex_ )
  41415. {}
  41416. VULKAN_HPP_CONSTEXPR MemoryType( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41417. MemoryType( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
  41418. : MemoryType( *reinterpret_cast<MemoryType const *>( &rhs ) )
  41419. {}
  41420. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41421. VULKAN_HPP_CONSTEXPR_14 MemoryType & operator=( MemoryType const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41422. MemoryType & operator=( VkMemoryType const & rhs ) VULKAN_HPP_NOEXCEPT
  41423. {
  41424. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryType const *>( &rhs );
  41425. return *this;
  41426. }
  41427. operator VkMemoryType const&() const VULKAN_HPP_NOEXCEPT
  41428. {
  41429. return *reinterpret_cast<const VkMemoryType*>( this );
  41430. }
  41431. operator VkMemoryType &() VULKAN_HPP_NOEXCEPT
  41432. {
  41433. return *reinterpret_cast<VkMemoryType*>( this );
  41434. }
  41435. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  41436. auto operator<=>( MemoryType const& ) const = default;
  41437. #else
  41438. bool operator==( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT
  41439. {
  41440. return ( propertyFlags == rhs.propertyFlags )
  41441. && ( heapIndex == rhs.heapIndex );
  41442. }
  41443. bool operator!=( MemoryType const& rhs ) const VULKAN_HPP_NOEXCEPT
  41444. {
  41445. return !operator==( rhs );
  41446. }
  41447. #endif
  41448. public:
  41449. VULKAN_HPP_NAMESPACE::MemoryPropertyFlags propertyFlags = {};
  41450. uint32_t heapIndex = {};
  41451. };
  41452. static_assert( sizeof( MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
  41453. static_assert( std::is_standard_layout<MemoryType>::value, "struct wrapper is not a standard layout!" );
  41454. struct MemoryHeap
  41455. {
  41456. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41457. VULKAN_HPP_CONSTEXPR MemoryHeap(VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags_ = {}) VULKAN_HPP_NOEXCEPT
  41458. : size( size_ ), flags( flags_ )
  41459. {}
  41460. VULKAN_HPP_CONSTEXPR MemoryHeap( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41461. MemoryHeap( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
  41462. : MemoryHeap( *reinterpret_cast<MemoryHeap const *>( &rhs ) )
  41463. {}
  41464. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41465. VULKAN_HPP_CONSTEXPR_14 MemoryHeap & operator=( MemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41466. MemoryHeap & operator=( VkMemoryHeap const & rhs ) VULKAN_HPP_NOEXCEPT
  41467. {
  41468. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryHeap const *>( &rhs );
  41469. return *this;
  41470. }
  41471. operator VkMemoryHeap const&() const VULKAN_HPP_NOEXCEPT
  41472. {
  41473. return *reinterpret_cast<const VkMemoryHeap*>( this );
  41474. }
  41475. operator VkMemoryHeap &() VULKAN_HPP_NOEXCEPT
  41476. {
  41477. return *reinterpret_cast<VkMemoryHeap*>( this );
  41478. }
  41479. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  41480. auto operator<=>( MemoryHeap const& ) const = default;
  41481. #else
  41482. bool operator==( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT
  41483. {
  41484. return ( size == rhs.size )
  41485. && ( flags == rhs.flags );
  41486. }
  41487. bool operator!=( MemoryHeap const& rhs ) const VULKAN_HPP_NOEXCEPT
  41488. {
  41489. return !operator==( rhs );
  41490. }
  41491. #endif
  41492. public:
  41493. VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  41494. VULKAN_HPP_NAMESPACE::MemoryHeapFlags flags = {};
  41495. };
  41496. static_assert( sizeof( MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" );
  41497. static_assert( std::is_standard_layout<MemoryHeap>::value, "struct wrapper is not a standard layout!" );
  41498. struct PhysicalDeviceMemoryProperties
  41499. {
  41500. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41501. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties(uint32_t memoryTypeCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::MemoryType,VK_MAX_MEMORY_TYPES> const& memoryTypes_ = {}, uint32_t memoryHeapCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::MemoryHeap,VK_MAX_MEMORY_HEAPS> const& memoryHeaps_ = {}) VULKAN_HPP_NOEXCEPT
  41502. : memoryTypeCount( memoryTypeCount_ ), memoryTypes( memoryTypes_ ), memoryHeapCount( memoryHeapCount_ ), memoryHeaps( memoryHeaps_ )
  41503. {}
  41504. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41505. PhysicalDeviceMemoryProperties( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  41506. : PhysicalDeviceMemoryProperties( *reinterpret_cast<PhysicalDeviceMemoryProperties const *>( &rhs ) )
  41507. {}
  41508. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41509. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties & operator=( PhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41510. PhysicalDeviceMemoryProperties & operator=( VkPhysicalDeviceMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  41511. {
  41512. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const *>( &rhs );
  41513. return *this;
  41514. }
  41515. operator VkPhysicalDeviceMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
  41516. {
  41517. return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties*>( this );
  41518. }
  41519. operator VkPhysicalDeviceMemoryProperties &() VULKAN_HPP_NOEXCEPT
  41520. {
  41521. return *reinterpret_cast<VkPhysicalDeviceMemoryProperties*>( this );
  41522. }
  41523. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  41524. auto operator<=>( PhysicalDeviceMemoryProperties const& ) const = default;
  41525. #else
  41526. bool operator==( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  41527. {
  41528. return ( memoryTypeCount == rhs.memoryTypeCount )
  41529. && ( memoryTypes == rhs.memoryTypes )
  41530. && ( memoryHeapCount == rhs.memoryHeapCount )
  41531. && ( memoryHeaps == rhs.memoryHeaps );
  41532. }
  41533. bool operator!=( PhysicalDeviceMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  41534. {
  41535. return !operator==( rhs );
  41536. }
  41537. #endif
  41538. public:
  41539. uint32_t memoryTypeCount = {};
  41540. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryType, VK_MAX_MEMORY_TYPES> memoryTypes = {};
  41541. uint32_t memoryHeapCount = {};
  41542. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::MemoryHeap, VK_MAX_MEMORY_HEAPS> memoryHeaps = {};
  41543. };
  41544. static_assert( sizeof( PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ), "struct and wrapper have different size!" );
  41545. static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties>::value, "struct wrapper is not a standard layout!" );
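// Illustrative helper sketch, not part of the generated header: the classic memory-type
// search over PhysicalDeviceMemoryProperties, e.g. when allocating for a buffer whose
// vk::MemoryRequirements::memoryTypeBits is "typeBits". The function name and its
// parameters are hypothetical; only getMemoryProperties(), memoryTypeCount, and
// memoryTypes[] come from this header.
//
//   uint32_t findMemoryTypeIndex( vk::PhysicalDevice physicalDevice,
//                                 uint32_t typeBits,
//                                 vk::MemoryPropertyFlags requiredProperties )
//   {
//     vk::PhysicalDeviceMemoryProperties memoryProperties = physicalDevice.getMemoryProperties();
//     for ( uint32_t i = 0; i < memoryProperties.memoryTypeCount; ++i )
//     {
//       if ( ( typeBits & ( 1u << i ) ) &&
//            ( memoryProperties.memoryTypes[i].propertyFlags & requiredProperties ) == requiredProperties )
//       {
//         return i;
//       }
//     }
//     throw std::runtime_error( "no suitable memory type found" );
//   }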
  41546. struct PhysicalDeviceMemoryProperties2
  41547. {
  41548. static const bool allowDuplicate = false;
  41549. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryProperties2;
  41550. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41551. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties_ = {}) VULKAN_HPP_NOEXCEPT
  41552. : memoryProperties( memoryProperties_ )
  41553. {}
  41554. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41555. PhysicalDeviceMemoryProperties2( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
  41556. : PhysicalDeviceMemoryProperties2( *reinterpret_cast<PhysicalDeviceMemoryProperties2 const *>( &rhs ) )
  41557. {}
  41558. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41559. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryProperties2 & operator=( PhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41560. PhysicalDeviceMemoryProperties2 & operator=( VkPhysicalDeviceMemoryProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
  41561. {
  41562. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 const *>( &rhs );
  41563. return *this;
  41564. }
  41565. operator VkPhysicalDeviceMemoryProperties2 const&() const VULKAN_HPP_NOEXCEPT
  41566. {
  41567. return *reinterpret_cast<const VkPhysicalDeviceMemoryProperties2*>( this );
  41568. }
  41569. operator VkPhysicalDeviceMemoryProperties2 &() VULKAN_HPP_NOEXCEPT
  41570. {
  41571. return *reinterpret_cast<VkPhysicalDeviceMemoryProperties2*>( this );
  41572. }
  41573. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  41574. auto operator<=>( PhysicalDeviceMemoryProperties2 const& ) const = default;
  41575. #else
  41576. bool operator==( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  41577. {
  41578. return ( sType == rhs.sType )
  41579. && ( pNext == rhs.pNext )
  41580. && ( memoryProperties == rhs.memoryProperties );
  41581. }
  41582. bool operator!=( PhysicalDeviceMemoryProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  41583. {
  41584. return !operator==( rhs );
  41585. }
  41586. #endif
  41587. public:
  41588. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryProperties2;
  41589. void* pNext = {};
  41590. VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties = {};
  41591. };
  41592. static_assert( sizeof( PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ), "struct and wrapper have different size!" );
  41593. static_assert( std::is_standard_layout<PhysicalDeviceMemoryProperties2>::value, "struct wrapper is not a standard layout!" );
  41594. template <>
  41595. struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryProperties2>
  41596. {
  41597. using Type = PhysicalDeviceMemoryProperties2;
  41598. };
  41599. using PhysicalDeviceMemoryProperties2KHR = PhysicalDeviceMemoryProperties2;
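// ---- Usage sketch (editorial addition, not part of the generated header) ----
// PhysicalDeviceMemoryProperties2 exists so extension structs can be chained onto the
// core memory query via pNext. A minimal sketch, assuming VK_EXT_memory_budget is
// available and that this header version provides the StructureChain overload of
// getMemoryProperties2; the function name and guard macro are illustrative only.
#if defined( VULKAN_HPP_EDITORIAL_USAGE_EXAMPLES )
inline VULKAN_HPP_NAMESPACE::DeviceSize exampleTotalMemoryBudget( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice )
{
  auto chain = physicalDevice.getMemoryProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2,
                                                   VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>();
  VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const & memoryProperties =
    chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>().memoryProperties;
  VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const & budget =
    chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>();
  VULKAN_HPP_NAMESPACE::DeviceSize total = 0;
  for ( uint32_t i = 0; i < memoryProperties.memoryHeapCount; ++i )
  {
    total += budget.heapBudget[i];  // per-heap estimate of what this process may still allocate
  }
  return total;
}
#endif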
  41600. struct MultisamplePropertiesEXT
  41601. {
  41602. static const bool allowDuplicate = false;
  41603. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMultisamplePropertiesEXT;
  41604. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41605. VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT(VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}) VULKAN_HPP_NOEXCEPT
  41606. : maxSampleLocationGridSize( maxSampleLocationGridSize_ )
  41607. {}
  41608. VULKAN_HPP_CONSTEXPR MultisamplePropertiesEXT( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41609. MultisamplePropertiesEXT( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  41610. : MultisamplePropertiesEXT( *reinterpret_cast<MultisamplePropertiesEXT const *>( &rhs ) )
  41611. {}
  41612. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41613. VULKAN_HPP_CONSTEXPR_14 MultisamplePropertiesEXT & operator=( MultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41614. MultisamplePropertiesEXT & operator=( VkMultisamplePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  41615. {
  41616. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT const *>( &rhs );
  41617. return *this;
  41618. }
  41619. operator VkMultisamplePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
  41620. {
  41621. return *reinterpret_cast<const VkMultisamplePropertiesEXT*>( this );
  41622. }
  41623. operator VkMultisamplePropertiesEXT &() VULKAN_HPP_NOEXCEPT
  41624. {
  41625. return *reinterpret_cast<VkMultisamplePropertiesEXT*>( this );
  41626. }
  41627. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  41628. auto operator<=>( MultisamplePropertiesEXT const& ) const = default;
  41629. #else
  41630. bool operator==( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  41631. {
  41632. return ( sType == rhs.sType )
  41633. && ( pNext == rhs.pNext )
  41634. && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize );
  41635. }
  41636. bool operator!=( MultisamplePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  41637. {
  41638. return !operator==( rhs );
  41639. }
  41640. #endif
  41641. public:
  41642. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMultisamplePropertiesEXT;
  41643. void* pNext = {};
  41644. VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
  41645. };
  41646. static_assert( sizeof( MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ), "struct and wrapper have different size!" );
  41647. static_assert( std::is_standard_layout<MultisamplePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  41648. template <>
  41649. struct CppType<StructureType, StructureType::eMultisamplePropertiesEXT>
  41650. {
  41651. using Type = MultisamplePropertiesEXT;
  41652. };
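// ---- Usage sketch (editorial addition, not part of the generated header) ----
// MultisamplePropertiesEXT is filled by vkGetPhysicalDeviceMultisamplePropertiesEXT
// (VK_EXT_sample_locations). A minimal sketch, assuming the extension is enabled and
// its entry point is reached through a DispatchLoaderDynamic; the function name and
// guard macro are illustrative assumptions only.
#if defined( VULKAN_HPP_EDITORIAL_USAGE_EXAMPLES )
inline VULKAN_HPP_NAMESPACE::Extent2D exampleQuerySampleLocationGrid( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice,
                                                                      VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic const & dispatcher )
{
  VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties =
    physicalDevice.getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e4, dispatcher );
  // maxSampleLocationGridSize bounds the sample-location grid accepted for 4x MSAA rendering.
  return multisampleProperties.maxSampleLocationGridSize;
}
#endif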
  41653. struct PhysicalDeviceLimits
  41654. {
  41655. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41656. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits(uint32_t maxImageDimension1D_ = {}, uint32_t maxImageDimension2D_ = {}, uint32_t maxImageDimension3D_ = {}, uint32_t maxImageDimensionCube_ = {}, uint32_t maxImageArrayLayers_ = {}, uint32_t maxTexelBufferElements_ = {}, uint32_t maxUniformBufferRange_ = {}, uint32_t maxStorageBufferRange_ = {}, uint32_t maxPushConstantsSize_ = {}, uint32_t maxMemoryAllocationCount_ = {}, uint32_t maxSamplerAllocationCount_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize_ = {}, uint32_t maxBoundDescriptorSets_ = {}, uint32_t maxPerStageDescriptorSamplers_ = {}, uint32_t maxPerStageDescriptorUniformBuffers_ = {}, uint32_t maxPerStageDescriptorStorageBuffers_ = {}, uint32_t maxPerStageDescriptorSampledImages_ = {}, uint32_t maxPerStageDescriptorStorageImages_ = {}, uint32_t maxPerStageDescriptorInputAttachments_ = {}, uint32_t maxPerStageResources_ = {}, uint32_t maxDescriptorSetSamplers_ = {}, uint32_t maxDescriptorSetUniformBuffers_ = {}, uint32_t maxDescriptorSetUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetStorageBuffers_ = {}, uint32_t maxDescriptorSetStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetSampledImages_ = {}, uint32_t maxDescriptorSetStorageImages_ = {}, uint32_t maxDescriptorSetInputAttachments_ = {}, uint32_t maxVertexInputAttributes_ = {}, uint32_t maxVertexInputBindings_ = {}, uint32_t maxVertexInputAttributeOffset_ = {}, uint32_t maxVertexInputBindingStride_ = {}, uint32_t maxVertexOutputComponents_ = {}, uint32_t maxTessellationGenerationLevel_ = {}, uint32_t maxTessellationPatchSize_ = {}, uint32_t maxTessellationControlPerVertexInputComponents_ = {}, uint32_t maxTessellationControlPerVertexOutputComponents_ = {}, uint32_t maxTessellationControlPerPatchOutputComponents_ = {}, uint32_t maxTessellationControlTotalOutputComponents_ = {}, uint32_t maxTessellationEvaluationInputComponents_ = {}, uint32_t maxTessellationEvaluationOutputComponents_ = {}, uint32_t maxGeometryShaderInvocations_ = {}, uint32_t maxGeometryInputComponents_ = {}, uint32_t maxGeometryOutputComponents_ = {}, uint32_t maxGeometryOutputVertices_ = {}, uint32_t maxGeometryTotalOutputComponents_ = {}, uint32_t maxFragmentInputComponents_ = {}, uint32_t maxFragmentOutputAttachments_ = {}, uint32_t maxFragmentDualSrcAttachments_ = {}, uint32_t maxFragmentCombinedOutputResources_ = {}, uint32_t maxComputeSharedMemorySize_ = {}, std::array<uint32_t,3> const& maxComputeWorkGroupCount_ = {}, uint32_t maxComputeWorkGroupInvocations_ = {}, std::array<uint32_t,3> const& maxComputeWorkGroupSize_ = {}, uint32_t subPixelPrecisionBits_ = {}, uint32_t subTexelPrecisionBits_ = {}, uint32_t mipmapPrecisionBits_ = {}, uint32_t maxDrawIndexedIndexValue_ = {}, uint32_t maxDrawIndirectCount_ = {}, float maxSamplerLodBias_ = {}, float maxSamplerAnisotropy_ = {}, uint32_t maxViewports_ = {}, std::array<uint32_t,2> const& maxViewportDimensions_ = {}, std::array<float,2> const& viewportBoundsRange_ = {}, uint32_t viewportSubPixelBits_ = {}, size_t minMemoryMapAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment_ = {}, int32_t minTexelOffset_ = {}, uint32_t maxTexelOffset_ = {}, int32_t minTexelGatherOffset_ = {}, uint32_t maxTexelGatherOffset_ = {}, float minInterpolationOffset_ = {}, float maxInterpolationOffset_ = {}, uint32_t 
subPixelInterpolationOffsetBits_ = {}, uint32_t maxFramebufferWidth_ = {}, uint32_t maxFramebufferHeight_ = {}, uint32_t maxFramebufferLayers_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts_ = {}, uint32_t maxColorAttachments_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts_ = {}, uint32_t maxSampleMaskWords_ = {}, VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics_ = {}, float timestampPeriod_ = {}, uint32_t maxClipDistances_ = {}, uint32_t maxCullDistances_ = {}, uint32_t maxCombinedClipAndCullDistances_ = {}, uint32_t discreteQueuePriorities_ = {}, std::array<float,2> const& pointSizeRange_ = {}, std::array<float,2> const& lineWidthRange_ = {}, float pointSizeGranularity_ = {}, float lineWidthGranularity_ = {}, VULKAN_HPP_NAMESPACE::Bool32 strictLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize_ = {}) VULKAN_HPP_NOEXCEPT
  41657. : maxImageDimension1D( maxImageDimension1D_ ), maxImageDimension2D( maxImageDimension2D_ ), maxImageDimension3D( maxImageDimension3D_ ), maxImageDimensionCube( maxImageDimensionCube_ ), maxImageArrayLayers( maxImageArrayLayers_ ), maxTexelBufferElements( maxTexelBufferElements_ ), maxUniformBufferRange( maxUniformBufferRange_ ), maxStorageBufferRange( maxStorageBufferRange_ ), maxPushConstantsSize( maxPushConstantsSize_ ), maxMemoryAllocationCount( maxMemoryAllocationCount_ ), maxSamplerAllocationCount( maxSamplerAllocationCount_ ), bufferImageGranularity( bufferImageGranularity_ ), sparseAddressSpaceSize( sparseAddressSpaceSize_ ), maxBoundDescriptorSets( maxBoundDescriptorSets_ ), maxPerStageDescriptorSamplers( maxPerStageDescriptorSamplers_ ), maxPerStageDescriptorUniformBuffers( maxPerStageDescriptorUniformBuffers_ ), maxPerStageDescriptorStorageBuffers( maxPerStageDescriptorStorageBuffers_ ), maxPerStageDescriptorSampledImages( maxPerStageDescriptorSampledImages_ ), maxPerStageDescriptorStorageImages( maxPerStageDescriptorStorageImages_ ), maxPerStageDescriptorInputAttachments( maxPerStageDescriptorInputAttachments_ ), maxPerStageResources( maxPerStageResources_ ), maxDescriptorSetSamplers( maxDescriptorSetSamplers_ ), maxDescriptorSetUniformBuffers( maxDescriptorSetUniformBuffers_ ), maxDescriptorSetUniformBuffersDynamic( maxDescriptorSetUniformBuffersDynamic_ ), maxDescriptorSetStorageBuffers( maxDescriptorSetStorageBuffers_ ), maxDescriptorSetStorageBuffersDynamic( maxDescriptorSetStorageBuffersDynamic_ ), maxDescriptorSetSampledImages( maxDescriptorSetSampledImages_ ), maxDescriptorSetStorageImages( maxDescriptorSetStorageImages_ ), maxDescriptorSetInputAttachments( maxDescriptorSetInputAttachments_ ), maxVertexInputAttributes( maxVertexInputAttributes_ ), maxVertexInputBindings( maxVertexInputBindings_ ), maxVertexInputAttributeOffset( maxVertexInputAttributeOffset_ ), maxVertexInputBindingStride( maxVertexInputBindingStride_ ), maxVertexOutputComponents( maxVertexOutputComponents_ ), maxTessellationGenerationLevel( maxTessellationGenerationLevel_ ), maxTessellationPatchSize( maxTessellationPatchSize_ ), maxTessellationControlPerVertexInputComponents( maxTessellationControlPerVertexInputComponents_ ), maxTessellationControlPerVertexOutputComponents( maxTessellationControlPerVertexOutputComponents_ ), maxTessellationControlPerPatchOutputComponents( maxTessellationControlPerPatchOutputComponents_ ), maxTessellationControlTotalOutputComponents( maxTessellationControlTotalOutputComponents_ ), maxTessellationEvaluationInputComponents( maxTessellationEvaluationInputComponents_ ), maxTessellationEvaluationOutputComponents( maxTessellationEvaluationOutputComponents_ ), maxGeometryShaderInvocations( maxGeometryShaderInvocations_ ), maxGeometryInputComponents( maxGeometryInputComponents_ ), maxGeometryOutputComponents( maxGeometryOutputComponents_ ), maxGeometryOutputVertices( maxGeometryOutputVertices_ ), maxGeometryTotalOutputComponents( maxGeometryTotalOutputComponents_ ), maxFragmentInputComponents( maxFragmentInputComponents_ ), maxFragmentOutputAttachments( maxFragmentOutputAttachments_ ), maxFragmentDualSrcAttachments( maxFragmentDualSrcAttachments_ ), maxFragmentCombinedOutputResources( maxFragmentCombinedOutputResources_ ), maxComputeSharedMemorySize( maxComputeSharedMemorySize_ ), maxComputeWorkGroupCount( maxComputeWorkGroupCount_ ), maxComputeWorkGroupInvocations( maxComputeWorkGroupInvocations_ ), maxComputeWorkGroupSize( maxComputeWorkGroupSize_ ), 
subPixelPrecisionBits( subPixelPrecisionBits_ ), subTexelPrecisionBits( subTexelPrecisionBits_ ), mipmapPrecisionBits( mipmapPrecisionBits_ ), maxDrawIndexedIndexValue( maxDrawIndexedIndexValue_ ), maxDrawIndirectCount( maxDrawIndirectCount_ ), maxSamplerLodBias( maxSamplerLodBias_ ), maxSamplerAnisotropy( maxSamplerAnisotropy_ ), maxViewports( maxViewports_ ), maxViewportDimensions( maxViewportDimensions_ ), viewportBoundsRange( viewportBoundsRange_ ), viewportSubPixelBits( viewportSubPixelBits_ ), minMemoryMapAlignment( minMemoryMapAlignment_ ), minTexelBufferOffsetAlignment( minTexelBufferOffsetAlignment_ ), minUniformBufferOffsetAlignment( minUniformBufferOffsetAlignment_ ), minStorageBufferOffsetAlignment( minStorageBufferOffsetAlignment_ ), minTexelOffset( minTexelOffset_ ), maxTexelOffset( maxTexelOffset_ ), minTexelGatherOffset( minTexelGatherOffset_ ), maxTexelGatherOffset( maxTexelGatherOffset_ ), minInterpolationOffset( minInterpolationOffset_ ), maxInterpolationOffset( maxInterpolationOffset_ ), subPixelInterpolationOffsetBits( subPixelInterpolationOffsetBits_ ), maxFramebufferWidth( maxFramebufferWidth_ ), maxFramebufferHeight( maxFramebufferHeight_ ), maxFramebufferLayers( maxFramebufferLayers_ ), framebufferColorSampleCounts( framebufferColorSampleCounts_ ), framebufferDepthSampleCounts( framebufferDepthSampleCounts_ ), framebufferStencilSampleCounts( framebufferStencilSampleCounts_ ), framebufferNoAttachmentsSampleCounts( framebufferNoAttachmentsSampleCounts_ ), maxColorAttachments( maxColorAttachments_ ), sampledImageColorSampleCounts( sampledImageColorSampleCounts_ ), sampledImageIntegerSampleCounts( sampledImageIntegerSampleCounts_ ), sampledImageDepthSampleCounts( sampledImageDepthSampleCounts_ ), sampledImageStencilSampleCounts( sampledImageStencilSampleCounts_ ), storageImageSampleCounts( storageImageSampleCounts_ ), maxSampleMaskWords( maxSampleMaskWords_ ), timestampComputeAndGraphics( timestampComputeAndGraphics_ ), timestampPeriod( timestampPeriod_ ), maxClipDistances( maxClipDistances_ ), maxCullDistances( maxCullDistances_ ), maxCombinedClipAndCullDistances( maxCombinedClipAndCullDistances_ ), discreteQueuePriorities( discreteQueuePriorities_ ), pointSizeRange( pointSizeRange_ ), lineWidthRange( lineWidthRange_ ), pointSizeGranularity( pointSizeGranularity_ ), lineWidthGranularity( lineWidthGranularity_ ), strictLines( strictLines_ ), standardSampleLocations( standardSampleLocations_ ), optimalBufferCopyOffsetAlignment( optimalBufferCopyOffsetAlignment_ ), optimalBufferCopyRowPitchAlignment( optimalBufferCopyRowPitchAlignment_ ), nonCoherentAtomSize( nonCoherentAtomSize_ )
  41658. {}
  41659. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41660. PhysicalDeviceLimits( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
  41661. : PhysicalDeviceLimits( *reinterpret_cast<PhysicalDeviceLimits const *>( &rhs ) )
  41662. {}
  41663. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41664. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLimits & operator=( PhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41665. PhysicalDeviceLimits & operator=( VkPhysicalDeviceLimits const & rhs ) VULKAN_HPP_NOEXCEPT
  41666. {
  41667. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits const *>( &rhs );
  41668. return *this;
  41669. }
  41670. operator VkPhysicalDeviceLimits const&() const VULKAN_HPP_NOEXCEPT
  41671. {
  41672. return *reinterpret_cast<const VkPhysicalDeviceLimits*>( this );
  41673. }
  41674. operator VkPhysicalDeviceLimits &() VULKAN_HPP_NOEXCEPT
  41675. {
  41676. return *reinterpret_cast<VkPhysicalDeviceLimits*>( this );
  41677. }
  41678. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  41679. auto operator<=>( PhysicalDeviceLimits const& ) const = default;
  41680. #else
  41681. bool operator==( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT
  41682. {
  41683. return ( maxImageDimension1D == rhs.maxImageDimension1D )
  41684. && ( maxImageDimension2D == rhs.maxImageDimension2D )
  41685. && ( maxImageDimension3D == rhs.maxImageDimension3D )
  41686. && ( maxImageDimensionCube == rhs.maxImageDimensionCube )
  41687. && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
  41688. && ( maxTexelBufferElements == rhs.maxTexelBufferElements )
  41689. && ( maxUniformBufferRange == rhs.maxUniformBufferRange )
  41690. && ( maxStorageBufferRange == rhs.maxStorageBufferRange )
  41691. && ( maxPushConstantsSize == rhs.maxPushConstantsSize )
  41692. && ( maxMemoryAllocationCount == rhs.maxMemoryAllocationCount )
  41693. && ( maxSamplerAllocationCount == rhs.maxSamplerAllocationCount )
  41694. && ( bufferImageGranularity == rhs.bufferImageGranularity )
  41695. && ( sparseAddressSpaceSize == rhs.sparseAddressSpaceSize )
  41696. && ( maxBoundDescriptorSets == rhs.maxBoundDescriptorSets )
  41697. && ( maxPerStageDescriptorSamplers == rhs.maxPerStageDescriptorSamplers )
  41698. && ( maxPerStageDescriptorUniformBuffers == rhs.maxPerStageDescriptorUniformBuffers )
  41699. && ( maxPerStageDescriptorStorageBuffers == rhs.maxPerStageDescriptorStorageBuffers )
  41700. && ( maxPerStageDescriptorSampledImages == rhs.maxPerStageDescriptorSampledImages )
  41701. && ( maxPerStageDescriptorStorageImages == rhs.maxPerStageDescriptorStorageImages )
  41702. && ( maxPerStageDescriptorInputAttachments == rhs.maxPerStageDescriptorInputAttachments )
  41703. && ( maxPerStageResources == rhs.maxPerStageResources )
  41704. && ( maxDescriptorSetSamplers == rhs.maxDescriptorSetSamplers )
  41705. && ( maxDescriptorSetUniformBuffers == rhs.maxDescriptorSetUniformBuffers )
  41706. && ( maxDescriptorSetUniformBuffersDynamic == rhs.maxDescriptorSetUniformBuffersDynamic )
  41707. && ( maxDescriptorSetStorageBuffers == rhs.maxDescriptorSetStorageBuffers )
  41708. && ( maxDescriptorSetStorageBuffersDynamic == rhs.maxDescriptorSetStorageBuffersDynamic )
  41709. && ( maxDescriptorSetSampledImages == rhs.maxDescriptorSetSampledImages )
  41710. && ( maxDescriptorSetStorageImages == rhs.maxDescriptorSetStorageImages )
  41711. && ( maxDescriptorSetInputAttachments == rhs.maxDescriptorSetInputAttachments )
  41712. && ( maxVertexInputAttributes == rhs.maxVertexInputAttributes )
  41713. && ( maxVertexInputBindings == rhs.maxVertexInputBindings )
  41714. && ( maxVertexInputAttributeOffset == rhs.maxVertexInputAttributeOffset )
  41715. && ( maxVertexInputBindingStride == rhs.maxVertexInputBindingStride )
  41716. && ( maxVertexOutputComponents == rhs.maxVertexOutputComponents )
  41717. && ( maxTessellationGenerationLevel == rhs.maxTessellationGenerationLevel )
  41718. && ( maxTessellationPatchSize == rhs.maxTessellationPatchSize )
  41719. && ( maxTessellationControlPerVertexInputComponents == rhs.maxTessellationControlPerVertexInputComponents )
  41720. && ( maxTessellationControlPerVertexOutputComponents == rhs.maxTessellationControlPerVertexOutputComponents )
  41721. && ( maxTessellationControlPerPatchOutputComponents == rhs.maxTessellationControlPerPatchOutputComponents )
  41722. && ( maxTessellationControlTotalOutputComponents == rhs.maxTessellationControlTotalOutputComponents )
  41723. && ( maxTessellationEvaluationInputComponents == rhs.maxTessellationEvaluationInputComponents )
  41724. && ( maxTessellationEvaluationOutputComponents == rhs.maxTessellationEvaluationOutputComponents )
  41725. && ( maxGeometryShaderInvocations == rhs.maxGeometryShaderInvocations )
  41726. && ( maxGeometryInputComponents == rhs.maxGeometryInputComponents )
  41727. && ( maxGeometryOutputComponents == rhs.maxGeometryOutputComponents )
  41728. && ( maxGeometryOutputVertices == rhs.maxGeometryOutputVertices )
  41729. && ( maxGeometryTotalOutputComponents == rhs.maxGeometryTotalOutputComponents )
  41730. && ( maxFragmentInputComponents == rhs.maxFragmentInputComponents )
  41731. && ( maxFragmentOutputAttachments == rhs.maxFragmentOutputAttachments )
  41732. && ( maxFragmentDualSrcAttachments == rhs.maxFragmentDualSrcAttachments )
  41733. && ( maxFragmentCombinedOutputResources == rhs.maxFragmentCombinedOutputResources )
  41734. && ( maxComputeSharedMemorySize == rhs.maxComputeSharedMemorySize )
  41735. && ( maxComputeWorkGroupCount == rhs.maxComputeWorkGroupCount )
  41736. && ( maxComputeWorkGroupInvocations == rhs.maxComputeWorkGroupInvocations )
  41737. && ( maxComputeWorkGroupSize == rhs.maxComputeWorkGroupSize )
  41738. && ( subPixelPrecisionBits == rhs.subPixelPrecisionBits )
  41739. && ( subTexelPrecisionBits == rhs.subTexelPrecisionBits )
  41740. && ( mipmapPrecisionBits == rhs.mipmapPrecisionBits )
  41741. && ( maxDrawIndexedIndexValue == rhs.maxDrawIndexedIndexValue )
  41742. && ( maxDrawIndirectCount == rhs.maxDrawIndirectCount )
  41743. && ( maxSamplerLodBias == rhs.maxSamplerLodBias )
  41744. && ( maxSamplerAnisotropy == rhs.maxSamplerAnisotropy )
  41745. && ( maxViewports == rhs.maxViewports )
  41746. && ( maxViewportDimensions == rhs.maxViewportDimensions )
  41747. && ( viewportBoundsRange == rhs.viewportBoundsRange )
  41748. && ( viewportSubPixelBits == rhs.viewportSubPixelBits )
  41749. && ( minMemoryMapAlignment == rhs.minMemoryMapAlignment )
  41750. && ( minTexelBufferOffsetAlignment == rhs.minTexelBufferOffsetAlignment )
  41751. && ( minUniformBufferOffsetAlignment == rhs.minUniformBufferOffsetAlignment )
  41752. && ( minStorageBufferOffsetAlignment == rhs.minStorageBufferOffsetAlignment )
  41753. && ( minTexelOffset == rhs.minTexelOffset )
  41754. && ( maxTexelOffset == rhs.maxTexelOffset )
  41755. && ( minTexelGatherOffset == rhs.minTexelGatherOffset )
  41756. && ( maxTexelGatherOffset == rhs.maxTexelGatherOffset )
  41757. && ( minInterpolationOffset == rhs.minInterpolationOffset )
  41758. && ( maxInterpolationOffset == rhs.maxInterpolationOffset )
  41759. && ( subPixelInterpolationOffsetBits == rhs.subPixelInterpolationOffsetBits )
  41760. && ( maxFramebufferWidth == rhs.maxFramebufferWidth )
  41761. && ( maxFramebufferHeight == rhs.maxFramebufferHeight )
  41762. && ( maxFramebufferLayers == rhs.maxFramebufferLayers )
  41763. && ( framebufferColorSampleCounts == rhs.framebufferColorSampleCounts )
  41764. && ( framebufferDepthSampleCounts == rhs.framebufferDepthSampleCounts )
  41765. && ( framebufferStencilSampleCounts == rhs.framebufferStencilSampleCounts )
  41766. && ( framebufferNoAttachmentsSampleCounts == rhs.framebufferNoAttachmentsSampleCounts )
  41767. && ( maxColorAttachments == rhs.maxColorAttachments )
  41768. && ( sampledImageColorSampleCounts == rhs.sampledImageColorSampleCounts )
  41769. && ( sampledImageIntegerSampleCounts == rhs.sampledImageIntegerSampleCounts )
  41770. && ( sampledImageDepthSampleCounts == rhs.sampledImageDepthSampleCounts )
  41771. && ( sampledImageStencilSampleCounts == rhs.sampledImageStencilSampleCounts )
  41772. && ( storageImageSampleCounts == rhs.storageImageSampleCounts )
  41773. && ( maxSampleMaskWords == rhs.maxSampleMaskWords )
  41774. && ( timestampComputeAndGraphics == rhs.timestampComputeAndGraphics )
  41775. && ( timestampPeriod == rhs.timestampPeriod )
  41776. && ( maxClipDistances == rhs.maxClipDistances )
  41777. && ( maxCullDistances == rhs.maxCullDistances )
  41778. && ( maxCombinedClipAndCullDistances == rhs.maxCombinedClipAndCullDistances )
  41779. && ( discreteQueuePriorities == rhs.discreteQueuePriorities )
  41780. && ( pointSizeRange == rhs.pointSizeRange )
  41781. && ( lineWidthRange == rhs.lineWidthRange )
  41782. && ( pointSizeGranularity == rhs.pointSizeGranularity )
  41783. && ( lineWidthGranularity == rhs.lineWidthGranularity )
  41784. && ( strictLines == rhs.strictLines )
  41785. && ( standardSampleLocations == rhs.standardSampleLocations )
  41786. && ( optimalBufferCopyOffsetAlignment == rhs.optimalBufferCopyOffsetAlignment )
  41787. && ( optimalBufferCopyRowPitchAlignment == rhs.optimalBufferCopyRowPitchAlignment )
  41788. && ( nonCoherentAtomSize == rhs.nonCoherentAtomSize );
  41789. }
  41790. bool operator!=( PhysicalDeviceLimits const& rhs ) const VULKAN_HPP_NOEXCEPT
  41791. {
  41792. return !operator==( rhs );
  41793. }
  41794. #endif
  41795. public:
  41796. uint32_t maxImageDimension1D = {};
  41797. uint32_t maxImageDimension2D = {};
  41798. uint32_t maxImageDimension3D = {};
  41799. uint32_t maxImageDimensionCube = {};
  41800. uint32_t maxImageArrayLayers = {};
  41801. uint32_t maxTexelBufferElements = {};
  41802. uint32_t maxUniformBufferRange = {};
  41803. uint32_t maxStorageBufferRange = {};
  41804. uint32_t maxPushConstantsSize = {};
  41805. uint32_t maxMemoryAllocationCount = {};
  41806. uint32_t maxSamplerAllocationCount = {};
  41807. VULKAN_HPP_NAMESPACE::DeviceSize bufferImageGranularity = {};
  41808. VULKAN_HPP_NAMESPACE::DeviceSize sparseAddressSpaceSize = {};
  41809. uint32_t maxBoundDescriptorSets = {};
  41810. uint32_t maxPerStageDescriptorSamplers = {};
  41811. uint32_t maxPerStageDescriptorUniformBuffers = {};
  41812. uint32_t maxPerStageDescriptorStorageBuffers = {};
  41813. uint32_t maxPerStageDescriptorSampledImages = {};
  41814. uint32_t maxPerStageDescriptorStorageImages = {};
  41815. uint32_t maxPerStageDescriptorInputAttachments = {};
  41816. uint32_t maxPerStageResources = {};
  41817. uint32_t maxDescriptorSetSamplers = {};
  41818. uint32_t maxDescriptorSetUniformBuffers = {};
  41819. uint32_t maxDescriptorSetUniformBuffersDynamic = {};
  41820. uint32_t maxDescriptorSetStorageBuffers = {};
  41821. uint32_t maxDescriptorSetStorageBuffersDynamic = {};
  41822. uint32_t maxDescriptorSetSampledImages = {};
  41823. uint32_t maxDescriptorSetStorageImages = {};
  41824. uint32_t maxDescriptorSetInputAttachments = {};
  41825. uint32_t maxVertexInputAttributes = {};
  41826. uint32_t maxVertexInputBindings = {};
  41827. uint32_t maxVertexInputAttributeOffset = {};
  41828. uint32_t maxVertexInputBindingStride = {};
  41829. uint32_t maxVertexOutputComponents = {};
  41830. uint32_t maxTessellationGenerationLevel = {};
  41831. uint32_t maxTessellationPatchSize = {};
  41832. uint32_t maxTessellationControlPerVertexInputComponents = {};
  41833. uint32_t maxTessellationControlPerVertexOutputComponents = {};
  41834. uint32_t maxTessellationControlPerPatchOutputComponents = {};
  41835. uint32_t maxTessellationControlTotalOutputComponents = {};
  41836. uint32_t maxTessellationEvaluationInputComponents = {};
  41837. uint32_t maxTessellationEvaluationOutputComponents = {};
  41838. uint32_t maxGeometryShaderInvocations = {};
  41839. uint32_t maxGeometryInputComponents = {};
  41840. uint32_t maxGeometryOutputComponents = {};
  41841. uint32_t maxGeometryOutputVertices = {};
  41842. uint32_t maxGeometryTotalOutputComponents = {};
  41843. uint32_t maxFragmentInputComponents = {};
  41844. uint32_t maxFragmentOutputAttachments = {};
  41845. uint32_t maxFragmentDualSrcAttachments = {};
  41846. uint32_t maxFragmentCombinedOutputResources = {};
  41847. uint32_t maxComputeSharedMemorySize = {};
  41848. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupCount = {};
  41849. uint32_t maxComputeWorkGroupInvocations = {};
  41850. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxComputeWorkGroupSize = {};
  41851. uint32_t subPixelPrecisionBits = {};
  41852. uint32_t subTexelPrecisionBits = {};
  41853. uint32_t mipmapPrecisionBits = {};
  41854. uint32_t maxDrawIndexedIndexValue = {};
  41855. uint32_t maxDrawIndirectCount = {};
  41856. float maxSamplerLodBias = {};
  41857. float maxSamplerAnisotropy = {};
  41858. uint32_t maxViewports = {};
  41859. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 2> maxViewportDimensions = {};
  41860. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> viewportBoundsRange = {};
  41861. uint32_t viewportSubPixelBits = {};
  41862. size_t minMemoryMapAlignment = {};
  41863. VULKAN_HPP_NAMESPACE::DeviceSize minTexelBufferOffsetAlignment = {};
  41864. VULKAN_HPP_NAMESPACE::DeviceSize minUniformBufferOffsetAlignment = {};
  41865. VULKAN_HPP_NAMESPACE::DeviceSize minStorageBufferOffsetAlignment = {};
  41866. int32_t minTexelOffset = {};
  41867. uint32_t maxTexelOffset = {};
  41868. int32_t minTexelGatherOffset = {};
  41869. uint32_t maxTexelGatherOffset = {};
  41870. float minInterpolationOffset = {};
  41871. float maxInterpolationOffset = {};
  41872. uint32_t subPixelInterpolationOffsetBits = {};
  41873. uint32_t maxFramebufferWidth = {};
  41874. uint32_t maxFramebufferHeight = {};
  41875. uint32_t maxFramebufferLayers = {};
  41876. VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferColorSampleCounts = {};
  41877. VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferDepthSampleCounts = {};
  41878. VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferStencilSampleCounts = {};
  41879. VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferNoAttachmentsSampleCounts = {};
  41880. uint32_t maxColorAttachments = {};
  41881. VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageColorSampleCounts = {};
  41882. VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageIntegerSampleCounts = {};
  41883. VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageDepthSampleCounts = {};
  41884. VULKAN_HPP_NAMESPACE::SampleCountFlags sampledImageStencilSampleCounts = {};
  41885. VULKAN_HPP_NAMESPACE::SampleCountFlags storageImageSampleCounts = {};
  41886. uint32_t maxSampleMaskWords = {};
  41887. VULKAN_HPP_NAMESPACE::Bool32 timestampComputeAndGraphics = {};
  41888. float timestampPeriod = {};
  41889. uint32_t maxClipDistances = {};
  41890. uint32_t maxCullDistances = {};
  41891. uint32_t maxCombinedClipAndCullDistances = {};
  41892. uint32_t discreteQueuePriorities = {};
  41893. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> pointSizeRange = {};
  41894. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> lineWidthRange = {};
  41895. float pointSizeGranularity = {};
  41896. float lineWidthGranularity = {};
  41897. VULKAN_HPP_NAMESPACE::Bool32 strictLines = {};
  41898. VULKAN_HPP_NAMESPACE::Bool32 standardSampleLocations = {};
  41899. VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyOffsetAlignment = {};
  41900. VULKAN_HPP_NAMESPACE::DeviceSize optimalBufferCopyRowPitchAlignment = {};
  41901. VULKAN_HPP_NAMESPACE::DeviceSize nonCoherentAtomSize = {};
  41902. };
  41903. static_assert( sizeof( PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" );
  41904. static_assert( std::is_standard_layout<PhysicalDeviceLimits>::value, "struct wrapper is not a standard layout!" );
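// ---- Usage sketch (editorial addition, not part of the generated header) ----
// PhysicalDeviceLimits is normally read through PhysicalDeviceProperties::limits. A
// minimal sketch of one common use: rounding a per-draw uniform block size up to
// minUniformBufferOffsetAlignment so it can be addressed with dynamic uniform buffer
// offsets. The function name and guard macro are illustrative assumptions only.
#if defined( VULKAN_HPP_EDITORIAL_USAGE_EXAMPLES )
inline VULKAN_HPP_NAMESPACE::DeviceSize exampleAlignedDynamicUboStride( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice,
                                                                        VULKAN_HPP_NAMESPACE::DeviceSize blockSize )
{
  VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = physicalDevice.getProperties();
  VULKAN_HPP_NAMESPACE::DeviceSize alignment = properties.limits.minUniformBufferOffsetAlignment;
  // Round blockSize up to the next multiple of alignment (alignment is a power of two, possibly 1).
  return ( alignment == 0 ) ? blockSize : ( ( blockSize + alignment - 1 ) & ~( alignment - 1 ) );
}
#endif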
  41905. struct PhysicalDeviceSparseProperties
  41906. {
  41907. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41908. VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties(VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict_ = {}) VULKAN_HPP_NOEXCEPT
  41909. : residencyStandard2DBlockShape( residencyStandard2DBlockShape_ ), residencyStandard2DMultisampleBlockShape( residencyStandard2DMultisampleBlockShape_ ), residencyStandard3DBlockShape( residencyStandard3DBlockShape_ ), residencyAlignedMipSize( residencyAlignedMipSize_ ), residencyNonResidentStrict( residencyNonResidentStrict_ )
  41910. {}
  41911. VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseProperties( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41912. PhysicalDeviceSparseProperties( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  41913. : PhysicalDeviceSparseProperties( *reinterpret_cast<PhysicalDeviceSparseProperties const *>( &rhs ) )
  41914. {}
  41915. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41916. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseProperties & operator=( PhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41917. PhysicalDeviceSparseProperties & operator=( VkPhysicalDeviceSparseProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  41918. {
  41919. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties const *>( &rhs );
  41920. return *this;
  41921. }
  41922. operator VkPhysicalDeviceSparseProperties const&() const VULKAN_HPP_NOEXCEPT
  41923. {
  41924. return *reinterpret_cast<const VkPhysicalDeviceSparseProperties*>( this );
  41925. }
  41926. operator VkPhysicalDeviceSparseProperties &() VULKAN_HPP_NOEXCEPT
  41927. {
  41928. return *reinterpret_cast<VkPhysicalDeviceSparseProperties*>( this );
  41929. }
  41930. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  41931. auto operator<=>( PhysicalDeviceSparseProperties const& ) const = default;
  41932. #else
  41933. bool operator==( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  41934. {
  41935. return ( residencyStandard2DBlockShape == rhs.residencyStandard2DBlockShape )
  41936. && ( residencyStandard2DMultisampleBlockShape == rhs.residencyStandard2DMultisampleBlockShape )
  41937. && ( residencyStandard3DBlockShape == rhs.residencyStandard3DBlockShape )
  41938. && ( residencyAlignedMipSize == rhs.residencyAlignedMipSize )
  41939. && ( residencyNonResidentStrict == rhs.residencyNonResidentStrict );
  41940. }
  41941. bool operator!=( PhysicalDeviceSparseProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  41942. {
  41943. return !operator==( rhs );
  41944. }
  41945. #endif
  41946. public:
  41947. VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DBlockShape = {};
  41948. VULKAN_HPP_NAMESPACE::Bool32 residencyStandard2DMultisampleBlockShape = {};
  41949. VULKAN_HPP_NAMESPACE::Bool32 residencyStandard3DBlockShape = {};
  41950. VULKAN_HPP_NAMESPACE::Bool32 residencyAlignedMipSize = {};
  41951. VULKAN_HPP_NAMESPACE::Bool32 residencyNonResidentStrict = {};
  41952. };
  41953. static_assert( sizeof( PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ), "struct and wrapper have different size!" );
  41954. static_assert( std::is_standard_layout<PhysicalDeviceSparseProperties>::value, "struct wrapper is not a standard layout!" );
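// ---- Usage sketch (editorial addition, not part of the generated header) ----
// PhysicalDeviceSparseProperties is also read via getProperties(). A minimal sketch:
// residencyNonResidentStrict indicates whether reads from unbound sparse regions are
// guaranteed to behave as if the memory were zero-filled. Names and the guard macro
// are illustrative assumptions only.
#if defined( VULKAN_HPP_EDITORIAL_USAGE_EXAMPLES )
inline bool exampleUnboundSparseReadsAreStrict( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice )
{
  VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = physicalDevice.getProperties();
  return properties.sparseProperties.residencyNonResidentStrict == VK_TRUE;
}
#endif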
  41955. struct PhysicalDeviceProperties
  41956. {
  41957. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41958. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties(uint32_t apiVersion_ = {}, uint32_t driverVersion_ = {}, uint32_t vendorID_ = {}, uint32_t deviceID_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType_ = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther, std::array<char,VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> const& deviceName_ = {}, std::array<uint8_t,VK_UUID_SIZE> const& pipelineCacheUUID_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits_ = {}, VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties_ = {}) VULKAN_HPP_NOEXCEPT
  41959. : apiVersion( apiVersion_ ), driverVersion( driverVersion_ ), vendorID( vendorID_ ), deviceID( deviceID_ ), deviceType( deviceType_ ), deviceName( deviceName_ ), pipelineCacheUUID( pipelineCacheUUID_ ), limits( limits_ ), sparseProperties( sparseProperties_ )
  41960. {}
  41961. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41962. PhysicalDeviceProperties( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  41963. : PhysicalDeviceProperties( *reinterpret_cast<PhysicalDeviceProperties const *>( &rhs ) )
  41964. {}
  41965. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  41966. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties & operator=( PhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  41967. PhysicalDeviceProperties & operator=( VkPhysicalDeviceProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  41968. {
  41969. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const *>( &rhs );
  41970. return *this;
  41971. }
  41972. operator VkPhysicalDeviceProperties const&() const VULKAN_HPP_NOEXCEPT
  41973. {
  41974. return *reinterpret_cast<const VkPhysicalDeviceProperties*>( this );
  41975. }
  41976. operator VkPhysicalDeviceProperties &() VULKAN_HPP_NOEXCEPT
  41977. {
  41978. return *reinterpret_cast<VkPhysicalDeviceProperties*>( this );
  41979. }
  41980. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  41981. auto operator<=>( PhysicalDeviceProperties const& ) const = default;
  41982. #else
  41983. bool operator==( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  41984. {
  41985. return ( apiVersion == rhs.apiVersion )
  41986. && ( driverVersion == rhs.driverVersion )
  41987. && ( vendorID == rhs.vendorID )
  41988. && ( deviceID == rhs.deviceID )
  41989. && ( deviceType == rhs.deviceType )
  41990. && ( deviceName == rhs.deviceName )
  41991. && ( pipelineCacheUUID == rhs.pipelineCacheUUID )
  41992. && ( limits == rhs.limits )
  41993. && ( sparseProperties == rhs.sparseProperties );
  41994. }
  41995. bool operator!=( PhysicalDeviceProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  41996. {
  41997. return !operator==( rhs );
  41998. }
  41999. #endif
  42000. public:
  42001. uint32_t apiVersion = {};
  42002. uint32_t driverVersion = {};
  42003. uint32_t vendorID = {};
  42004. uint32_t deviceID = {};
  42005. VULKAN_HPP_NAMESPACE::PhysicalDeviceType deviceType = VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eOther;
  42006. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_PHYSICAL_DEVICE_NAME_SIZE> deviceName = {};
  42007. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> pipelineCacheUUID = {};
  42008. VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits limits = {};
  42009. VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties sparseProperties = {};
  42010. };
  42011. static_assert( sizeof( PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ), "struct and wrapper have different size!" );
  42012. static_assert( std::is_standard_layout<PhysicalDeviceProperties>::value, "struct wrapper is not a standard layout!" );
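// ---- Usage sketch (editorial addition, not part of the generated header) ----
// A minimal sketch of device selection with PhysicalDeviceProperties: prefer the
// first discrete GPU that reports at least Vulkan 1.1, otherwise fall back to the
// first enumerated device. The function name and guard macro are illustrative only.
#if defined( VULKAN_HPP_EDITORIAL_USAGE_EXAMPLES )
inline VULKAN_HPP_NAMESPACE::PhysicalDevice examplePickDiscreteGpu( VULKAN_HPP_NAMESPACE::Instance instance )
{
  std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice> devices = instance.enumeratePhysicalDevices();
  for ( VULKAN_HPP_NAMESPACE::PhysicalDevice const & device : devices )
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = device.getProperties();
    bool discrete = ( properties.deviceType == VULKAN_HPP_NAMESPACE::PhysicalDeviceType::eDiscreteGpu );
    bool v11OrUp  = ( VK_VERSION_MAJOR( properties.apiVersion ) > 1 ) || ( VK_VERSION_MINOR( properties.apiVersion ) >= 1 );
    if ( discrete && v11OrUp )
    {
      return device;  // properties.deviceName.data() gives a printable, null-terminated name
    }
  }
  return devices.empty() ? VULKAN_HPP_NAMESPACE::PhysicalDevice() : devices.front();
}
#endif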
  42013. struct PhysicalDeviceProperties2
  42014. {
  42015. static const bool allowDuplicate = false;
  42016. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProperties2;
  42017. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42018. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2(VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties_ = {}) VULKAN_HPP_NOEXCEPT
  42019. : properties( properties_ )
  42020. {}
  42021. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42022. PhysicalDeviceProperties2( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
  42023. : PhysicalDeviceProperties2( *reinterpret_cast<PhysicalDeviceProperties2 const *>( &rhs ) )
  42024. {}
  42025. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42026. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProperties2 & operator=( PhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42027. PhysicalDeviceProperties2 & operator=( VkPhysicalDeviceProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
  42028. {
  42029. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 const *>( &rhs );
  42030. return *this;
  42031. }
  42032. operator VkPhysicalDeviceProperties2 const&() const VULKAN_HPP_NOEXCEPT
  42033. {
  42034. return *reinterpret_cast<const VkPhysicalDeviceProperties2*>( this );
  42035. }
  42036. operator VkPhysicalDeviceProperties2 &() VULKAN_HPP_NOEXCEPT
  42037. {
  42038. return *reinterpret_cast<VkPhysicalDeviceProperties2*>( this );
  42039. }
  42040. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  42041. auto operator<=>( PhysicalDeviceProperties2 const& ) const = default;
  42042. #else
  42043. bool operator==( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  42044. {
  42045. return ( sType == rhs.sType )
  42046. && ( pNext == rhs.pNext )
  42047. && ( properties == rhs.properties );
  42048. }
  42049. bool operator!=( PhysicalDeviceProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  42050. {
  42051. return !operator==( rhs );
  42052. }
  42053. #endif
  42054. public:
  42055. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProperties2;
  42056. void* pNext = {};
  42057. VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties = {};
  42058. };
  42059. static_assert( sizeof( PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ), "struct and wrapper have different size!" );
  42060. static_assert( std::is_standard_layout<PhysicalDeviceProperties2>::value, "struct wrapper is not a standard layout!" );
  42061. template <>
  42062. struct CppType<StructureType, StructureType::ePhysicalDeviceProperties2>
  42063. {
  42064. using Type = PhysicalDeviceProperties2;
  42065. };
  42066. using PhysicalDeviceProperties2KHR = PhysicalDeviceProperties2;
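// ---- Usage sketch (editorial addition, not part of the generated header) ----
// PhysicalDeviceProperties2 is the pNext-extensible variant of the query above. A
// minimal sketch, assuming a Vulkan 1.2 device and that this header version provides
// the StructureChain overload of getProperties2; the function name and guard macro
// are illustrative assumptions only.
#if defined( VULKAN_HPP_EDITORIAL_USAGE_EXAMPLES )
inline VULKAN_HPP_NAMESPACE::DriverId exampleQueryDriverId( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice )
{
  auto chain = physicalDevice.getProperties2<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2,
                                             VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties>();
  // The core properties stay reachable through the chain's head struct...
  VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties const & core =
    chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>().properties;
  (void)core;
  // ...while the chained struct carries the extended data.
  return chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties>().driverID;
}
#endif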
  42067. struct QueryPoolPerformanceCreateInfoKHR
  42068. {
  42069. static const bool allowDuplicate = false;
  42070. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceCreateInfoKHR;
  42071. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42072. VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR(uint32_t queueFamilyIndex_ = {}, uint32_t counterIndexCount_ = {}, const uint32_t* pCounterIndices_ = {}) VULKAN_HPP_NOEXCEPT
  42073. : queueFamilyIndex( queueFamilyIndex_ ), counterIndexCount( counterIndexCount_ ), pCounterIndices( pCounterIndices_ )
  42074. {}
  42075. VULKAN_HPP_CONSTEXPR QueryPoolPerformanceCreateInfoKHR( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42076. QueryPoolPerformanceCreateInfoKHR( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  42077. : QueryPoolPerformanceCreateInfoKHR( *reinterpret_cast<QueryPoolPerformanceCreateInfoKHR const *>( &rhs ) )
  42078. {}
  42079. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  42080. QueryPoolPerformanceCreateInfoKHR( uint32_t queueFamilyIndex_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_ )
  42081. : queueFamilyIndex( queueFamilyIndex_ ), counterIndexCount( static_cast<uint32_t>( counterIndices_.size() ) ), pCounterIndices( counterIndices_.data() )
  42082. {}
  42083. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  42084. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42085. VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceCreateInfoKHR & operator=( QueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42086. QueryPoolPerformanceCreateInfoKHR & operator=( VkQueryPoolPerformanceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  42087. {
  42088. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR const *>( &rhs );
  42089. return *this;
  42090. }
  42091. QueryPoolPerformanceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  42092. {
  42093. pNext = pNext_;
  42094. return *this;
  42095. }
  42096. QueryPoolPerformanceCreateInfoKHR & setQueueFamilyIndex( uint32_t queueFamilyIndex_ ) VULKAN_HPP_NOEXCEPT
  42097. {
  42098. queueFamilyIndex = queueFamilyIndex_;
  42099. return *this;
  42100. }
  42101. QueryPoolPerformanceCreateInfoKHR & setCounterIndexCount( uint32_t counterIndexCount_ ) VULKAN_HPP_NOEXCEPT
  42102. {
  42103. counterIndexCount = counterIndexCount_;
  42104. return *this;
  42105. }
  42106. QueryPoolPerformanceCreateInfoKHR & setPCounterIndices( const uint32_t* pCounterIndices_ ) VULKAN_HPP_NOEXCEPT
  42107. {
  42108. pCounterIndices = pCounterIndices_;
  42109. return *this;
  42110. }
  42111. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  42112. QueryPoolPerformanceCreateInfoKHR & setCounterIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & counterIndices_ ) VULKAN_HPP_NOEXCEPT
  42113. {
  42114. counterIndexCount = static_cast<uint32_t>( counterIndices_.size() );
  42115. pCounterIndices = counterIndices_.data();
  42116. return *this;
  42117. }
  42118. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  42119. operator VkQueryPoolPerformanceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  42120. {
  42121. return *reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR*>( this );
  42122. }
  42123. operator VkQueryPoolPerformanceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
  42124. {
  42125. return *reinterpret_cast<VkQueryPoolPerformanceCreateInfoKHR*>( this );
  42126. }
  42127. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  42128. auto operator<=>( QueryPoolPerformanceCreateInfoKHR const& ) const = default;
  42129. #else
  42130. bool operator==( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  42131. {
  42132. return ( sType == rhs.sType )
  42133. && ( pNext == rhs.pNext )
  42134. && ( queueFamilyIndex == rhs.queueFamilyIndex )
  42135. && ( counterIndexCount == rhs.counterIndexCount )
  42136. && ( pCounterIndices == rhs.pCounterIndices );
  42137. }
  42138. bool operator!=( QueryPoolPerformanceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  42139. {
  42140. return !operator==( rhs );
  42141. }
  42142. #endif
  42143. public:
  42144. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceCreateInfoKHR;
  42145. const void* pNext = {};
  42146. uint32_t queueFamilyIndex = {};
  42147. uint32_t counterIndexCount = {};
  42148. const uint32_t* pCounterIndices = {};
  42149. };
  42150. static_assert( sizeof( QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ), "struct and wrapper have different size!" );
  42151. static_assert( std::is_standard_layout<QueryPoolPerformanceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  42152. template <>
  42153. struct CppType<StructureType, StructureType::eQueryPoolPerformanceCreateInfoKHR>
  42154. {
  42155. using Type = QueryPoolPerformanceCreateInfoKHR;
  42156. };
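// ---- Usage sketch (editorial addition, not part of the generated header) ----
// QueryPoolPerformanceCreateInfoKHR is chained into QueryPoolCreateInfo::pNext when
// creating a VK_KHR_performance_query pool. A minimal sketch, assuming the extension
// is enabled and counterIndices was obtained from the extension's counter-enumeration
// entry point; the function name and guard macro are illustrative assumptions only.
#if defined( VULKAN_HPP_EDITORIAL_USAGE_EXAMPLES )
inline VULKAN_HPP_NAMESPACE::QueryPool exampleCreatePerformanceQueryPool( VULKAN_HPP_NAMESPACE::Device device,
                                                                          uint32_t queueFamilyIndex,
                                                                          std::vector<uint32_t> const & counterIndices )
{
  VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR performanceInfo;
  performanceInfo.setQueueFamilyIndex( queueFamilyIndex )
                 .setCounterIndexCount( static_cast<uint32_t>( counterIndices.size() ) )
                 .setPCounterIndices( counterIndices.data() );
  VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo createInfo;
  createInfo.setQueryType( VULKAN_HPP_NAMESPACE::QueryType::ePerformanceQueryKHR )
            .setQueryCount( 1 )
            .setPNext( &performanceInfo );
  return device.createQueryPool( createInfo );
}
#endif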
  42157. struct QueueFamilyProperties
  42158. {
  42159. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42160. VULKAN_HPP_CONSTEXPR QueueFamilyProperties(VULKAN_HPP_NAMESPACE::QueueFlags queueFlags_ = {}, uint32_t queueCount_ = {}, uint32_t timestampValidBits_ = {}, VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity_ = {}) VULKAN_HPP_NOEXCEPT
  42161. : queueFlags( queueFlags_ ), queueCount( queueCount_ ), timestampValidBits( timestampValidBits_ ), minImageTransferGranularity( minImageTransferGranularity_ )
  42162. {}
  42163. VULKAN_HPP_CONSTEXPR QueueFamilyProperties( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42164. QueueFamilyProperties( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  42165. : QueueFamilyProperties( *reinterpret_cast<QueueFamilyProperties const *>( &rhs ) )
  42166. {}
  42167. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42168. VULKAN_HPP_CONSTEXPR_14 QueueFamilyProperties & operator=( QueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42169. QueueFamilyProperties & operator=( VkQueueFamilyProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  42170. {
  42171. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties const *>( &rhs );
  42172. return *this;
  42173. }
  42174. operator VkQueueFamilyProperties const&() const VULKAN_HPP_NOEXCEPT
  42175. {
  42176. return *reinterpret_cast<const VkQueueFamilyProperties*>( this );
  42177. }
  42178. operator VkQueueFamilyProperties &() VULKAN_HPP_NOEXCEPT
  42179. {
  42180. return *reinterpret_cast<VkQueueFamilyProperties*>( this );
  42181. }
  42182. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  42183. auto operator<=>( QueueFamilyProperties const& ) const = default;
  42184. #else
  42185. bool operator==( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  42186. {
  42187. return ( queueFlags == rhs.queueFlags )
  42188. && ( queueCount == rhs.queueCount )
  42189. && ( timestampValidBits == rhs.timestampValidBits )
  42190. && ( minImageTransferGranularity == rhs.minImageTransferGranularity );
  42191. }
  42192. bool operator!=( QueueFamilyProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  42193. {
  42194. return !operator==( rhs );
  42195. }
  42196. #endif
  42197. public:
  42198. VULKAN_HPP_NAMESPACE::QueueFlags queueFlags = {};
  42199. uint32_t queueCount = {};
  42200. uint32_t timestampValidBits = {};
  42201. VULKAN_HPP_NAMESPACE::Extent3D minImageTransferGranularity = {};
  42202. };
  42203. static_assert( sizeof( QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ), "struct and wrapper have different size!" );
  42204. static_assert( std::is_standard_layout<QueueFamilyProperties>::value, "struct wrapper is not a standard layout!" );
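// ---- Usage sketch (editorial addition, not part of the generated header) ----
// A minimal sketch of the usual QueueFamilyProperties scan: return the index of the
// first family whose queueFlags include graphics support. The function name and
// guard macro are illustrative assumptions only.
#if defined( VULKAN_HPP_EDITORIAL_USAGE_EXAMPLES )
inline uint32_t exampleFindGraphicsQueueFamily( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice )
{
  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
  for ( uint32_t i = 0; i < static_cast<uint32_t>( families.size() ); ++i )
  {
    if ( families[i].queueFlags & VULKAN_HPP_NAMESPACE::QueueFlagBits::eGraphics )
    {
      return i;  // usable as DeviceQueueCreateInfo::queueFamilyIndex
    }
  }
  return ~0u;  // no graphics-capable family found
}
#endif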
  42205. struct QueueFamilyProperties2
  42206. {
  42207. static const bool allowDuplicate = false;
  42208. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyProperties2;
  42209. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42210. VULKAN_HPP_CONSTEXPR QueueFamilyProperties2(VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties_ = {}) VULKAN_HPP_NOEXCEPT
  42211. : queueFamilyProperties( queueFamilyProperties_ )
  42212. {}
  42213. VULKAN_HPP_CONSTEXPR QueueFamilyProperties2( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42214. QueueFamilyProperties2( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
  42215. : QueueFamilyProperties2( *reinterpret_cast<QueueFamilyProperties2 const *>( &rhs ) )
  42216. {}
  42217. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42218. VULKAN_HPP_CONSTEXPR_14 QueueFamilyProperties2 & operator=( QueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42219. QueueFamilyProperties2 & operator=( VkQueueFamilyProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
  42220. {
  42221. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const *>( &rhs );
  42222. return *this;
  42223. }
  42224. operator VkQueueFamilyProperties2 const&() const VULKAN_HPP_NOEXCEPT
  42225. {
  42226. return *reinterpret_cast<const VkQueueFamilyProperties2*>( this );
  42227. }
  42228. operator VkQueueFamilyProperties2 &() VULKAN_HPP_NOEXCEPT
  42229. {
  42230. return *reinterpret_cast<VkQueueFamilyProperties2*>( this );
  42231. }
  42232. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  42233. auto operator<=>( QueueFamilyProperties2 const& ) const = default;
  42234. #else
  42235. bool operator==( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  42236. {
  42237. return ( sType == rhs.sType )
  42238. && ( pNext == rhs.pNext )
  42239. && ( queueFamilyProperties == rhs.queueFamilyProperties );
  42240. }
  42241. bool operator!=( QueueFamilyProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  42242. {
  42243. return !operator==( rhs );
  42244. }
  42245. #endif
  42246. public:
  42247. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyProperties2;
  42248. void* pNext = {};
  42249. VULKAN_HPP_NAMESPACE::QueueFamilyProperties queueFamilyProperties = {};
  42250. };
  42251. static_assert( sizeof( QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ), "struct and wrapper have different size!" );
  42252. static_assert( std::is_standard_layout<QueueFamilyProperties2>::value, "struct wrapper is not a standard layout!" );
  42253. template <>
  42254. struct CppType<StructureType, StructureType::eQueueFamilyProperties2>
  42255. {
  42256. using Type = QueueFamilyProperties2;
  42257. };
  42258. using QueueFamilyProperties2KHR = QueueFamilyProperties2;
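// ---- Usage sketch (editorial addition, not part of the generated header) ----
// QueueFamilyProperties2 wraps the same per-family data for the pNext-extensible
// query path; per-family extension structs could be chained through pNext. A minimal
// sketch that just unwraps the core member; names and the guard macro are
// illustrative assumptions only.
#if defined( VULKAN_HPP_EDITORIAL_USAGE_EXAMPLES )
inline uint32_t exampleCountComputeCapableFamilies( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice )
{
  std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> families2 = physicalDevice.getQueueFamilyProperties2();
  uint32_t count = 0;
  for ( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 const & family2 : families2 )
  {
    if ( family2.queueFamilyProperties.queueFlags & VULKAN_HPP_NAMESPACE::QueueFlagBits::eCompute )
    {
      ++count;
    }
  }
  return count;
}
#endif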
  42259. struct PhysicalDeviceSparseImageFormatInfo2
  42260. {
  42261. static const bool allowDuplicate = false;
  42262. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
  42263. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42264. VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ImageType type_ = VULKAN_HPP_NAMESPACE::ImageType::e1D, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, VULKAN_HPP_NAMESPACE::ImageTiling tiling_ = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal) VULKAN_HPP_NOEXCEPT
  42265. : format( format_ ), type( type_ ), samples( samples_ ), usage( usage_ ), tiling( tiling_ )
  42266. {}
  42267. VULKAN_HPP_CONSTEXPR PhysicalDeviceSparseImageFormatInfo2( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42268. PhysicalDeviceSparseImageFormatInfo2( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
  42269. : PhysicalDeviceSparseImageFormatInfo2( *reinterpret_cast<PhysicalDeviceSparseImageFormatInfo2 const *>( &rhs ) )
  42270. {}
  42271. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42272. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSparseImageFormatInfo2 & operator=( PhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42273. PhysicalDeviceSparseImageFormatInfo2 & operator=( VkPhysicalDeviceSparseImageFormatInfo2 const & rhs ) VULKAN_HPP_NOEXCEPT
  42274. {
  42275. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 const *>( &rhs );
  42276. return *this;
  42277. }
  42278. PhysicalDeviceSparseImageFormatInfo2 & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  42279. {
  42280. pNext = pNext_;
  42281. return *this;
  42282. }
  42283. PhysicalDeviceSparseImageFormatInfo2 & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
  42284. {
  42285. format = format_;
  42286. return *this;
  42287. }
  42288. PhysicalDeviceSparseImageFormatInfo2 & setType( VULKAN_HPP_NAMESPACE::ImageType type_ ) VULKAN_HPP_NOEXCEPT
  42289. {
  42290. type = type_;
  42291. return *this;
  42292. }
  42293. PhysicalDeviceSparseImageFormatInfo2 & setSamples( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples_ ) VULKAN_HPP_NOEXCEPT
  42294. {
  42295. samples = samples_;
  42296. return *this;
  42297. }
  42298. PhysicalDeviceSparseImageFormatInfo2 & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
  42299. {
  42300. usage = usage_;
  42301. return *this;
  42302. }
  42303. PhysicalDeviceSparseImageFormatInfo2 & setTiling( VULKAN_HPP_NAMESPACE::ImageTiling tiling_ ) VULKAN_HPP_NOEXCEPT
  42304. {
  42305. tiling = tiling_;
  42306. return *this;
  42307. }
  42308. operator VkPhysicalDeviceSparseImageFormatInfo2 const&() const VULKAN_HPP_NOEXCEPT
  42309. {
  42310. return *reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2*>( this );
  42311. }
  42312. operator VkPhysicalDeviceSparseImageFormatInfo2 &() VULKAN_HPP_NOEXCEPT
  42313. {
  42314. return *reinterpret_cast<VkPhysicalDeviceSparseImageFormatInfo2*>( this );
  42315. }
  42316. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  42317. auto operator<=>( PhysicalDeviceSparseImageFormatInfo2 const& ) const = default;
  42318. #else
  42319. bool operator==( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  42320. {
  42321. return ( sType == rhs.sType )
  42322. && ( pNext == rhs.pNext )
  42323. && ( format == rhs.format )
  42324. && ( type == rhs.type )
  42325. && ( samples == rhs.samples )
  42326. && ( usage == rhs.usage )
  42327. && ( tiling == rhs.tiling );
  42328. }
  42329. bool operator!=( PhysicalDeviceSparseImageFormatInfo2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  42330. {
  42331. return !operator==( rhs );
  42332. }
  42333. #endif
  42334. public:
  42335. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSparseImageFormatInfo2;
  42336. const void* pNext = {};
  42337. VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  42338. VULKAN_HPP_NAMESPACE::ImageType type = VULKAN_HPP_NAMESPACE::ImageType::e1D;
  42339. VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
  42340. VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
  42341. VULKAN_HPP_NAMESPACE::ImageTiling tiling = VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal;
  42342. };
  42343. static_assert( sizeof( PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ), "struct and wrapper have different size!" );
  42344. static_assert( std::is_standard_layout<PhysicalDeviceSparseImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
  42345. template <>
  42346. struct CppType<StructureType, StructureType::ePhysicalDeviceSparseImageFormatInfo2>
  42347. {
  42348. using Type = PhysicalDeviceSparseImageFormatInfo2;
  42349. };
  42350. using PhysicalDeviceSparseImageFormatInfo2KHR = PhysicalDeviceSparseImageFormatInfo2;
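// Example (illustrative sketch, not generated code): the setters above can be chained to fill
// in a sparse-image query. `physicalDevice` is an assumed, already-acquired handle and `vk` is
// the default VULKAN_HPP_NAMESPACE; in enhanced mode the matching query returns a std::vector
// of SparseImageFormatProperties2 (see that struct below).
//
//   vk::PhysicalDeviceSparseImageFormatInfo2 info;
//   info.setFormat( vk::Format::eR8G8B8A8Unorm )
//       .setType( vk::ImageType::e2D )
//       .setSamples( vk::SampleCountFlagBits::e1 )
//       .setUsage( vk::ImageUsageFlagBits::eSampled )
//       .setTiling( vk::ImageTiling::eOptimal );
//   auto properties = physicalDevice.getSparseImageFormatProperties2( info );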
  42351. struct SparseImageFormatProperties2
  42352. {
  42353. static const bool allowDuplicate = false;
  42354. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSparseImageFormatProperties2;
  42355. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42356. VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2(VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties_ = {}) VULKAN_HPP_NOEXCEPT
  42357. : properties( properties_ )
  42358. {}
  42359. VULKAN_HPP_CONSTEXPR SparseImageFormatProperties2( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42360. SparseImageFormatProperties2( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
  42361. : SparseImageFormatProperties2( *reinterpret_cast<SparseImageFormatProperties2 const *>( &rhs ) )
  42362. {}
  42363. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42364. VULKAN_HPP_CONSTEXPR_14 SparseImageFormatProperties2 & operator=( SparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42365. SparseImageFormatProperties2 & operator=( VkSparseImageFormatProperties2 const & rhs ) VULKAN_HPP_NOEXCEPT
  42366. {
  42367. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 const *>( &rhs );
  42368. return *this;
  42369. }
  42370. operator VkSparseImageFormatProperties2 const&() const VULKAN_HPP_NOEXCEPT
  42371. {
  42372. return *reinterpret_cast<const VkSparseImageFormatProperties2*>( this );
  42373. }
  42374. operator VkSparseImageFormatProperties2 &() VULKAN_HPP_NOEXCEPT
  42375. {
  42376. return *reinterpret_cast<VkSparseImageFormatProperties2*>( this );
  42377. }
  42378. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  42379. auto operator<=>( SparseImageFormatProperties2 const& ) const = default;
  42380. #else
  42381. bool operator==( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  42382. {
  42383. return ( sType == rhs.sType )
  42384. && ( pNext == rhs.pNext )
  42385. && ( properties == rhs.properties );
  42386. }
  42387. bool operator!=( SparseImageFormatProperties2 const& rhs ) const VULKAN_HPP_NOEXCEPT
  42388. {
  42389. return !operator==( rhs );
  42390. }
  42391. #endif
  42392. public:
  42393. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSparseImageFormatProperties2;
  42394. void* pNext = {};
  42395. VULKAN_HPP_NAMESPACE::SparseImageFormatProperties properties = {};
  42396. };
  42397. static_assert( sizeof( SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ), "struct and wrapper have different size!" );
  42398. static_assert( std::is_standard_layout<SparseImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
  42399. template <>
  42400. struct CppType<StructureType, StructureType::eSparseImageFormatProperties2>
  42401. {
  42402. using Type = SparseImageFormatProperties2;
  42403. };
  42404. using SparseImageFormatProperties2KHR = SparseImageFormatProperties2;
  42405. struct FramebufferMixedSamplesCombinationNV
  42406. {
  42407. static const bool allowDuplicate = false;
  42408. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferMixedSamplesCombinationNV;
  42409. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42410. VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV(VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge, VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples_ = {}) VULKAN_HPP_NOEXCEPT
  42411. : coverageReductionMode( coverageReductionMode_ ), rasterizationSamples( rasterizationSamples_ ), depthStencilSamples( depthStencilSamples_ ), colorSamples( colorSamples_ )
  42412. {}
  42413. VULKAN_HPP_CONSTEXPR FramebufferMixedSamplesCombinationNV( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42414. FramebufferMixedSamplesCombinationNV( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
  42415. : FramebufferMixedSamplesCombinationNV( *reinterpret_cast<FramebufferMixedSamplesCombinationNV const *>( &rhs ) )
  42416. {}
  42417. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42418. VULKAN_HPP_CONSTEXPR_14 FramebufferMixedSamplesCombinationNV & operator=( FramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42419. FramebufferMixedSamplesCombinationNV & operator=( VkFramebufferMixedSamplesCombinationNV const & rhs ) VULKAN_HPP_NOEXCEPT
  42420. {
  42421. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV const *>( &rhs );
  42422. return *this;
  42423. }
  42424. operator VkFramebufferMixedSamplesCombinationNV const&() const VULKAN_HPP_NOEXCEPT
  42425. {
  42426. return *reinterpret_cast<const VkFramebufferMixedSamplesCombinationNV*>( this );
  42427. }
  42428. operator VkFramebufferMixedSamplesCombinationNV &() VULKAN_HPP_NOEXCEPT
  42429. {
  42430. return *reinterpret_cast<VkFramebufferMixedSamplesCombinationNV*>( this );
  42431. }
  42432. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  42433. auto operator<=>( FramebufferMixedSamplesCombinationNV const& ) const = default;
  42434. #else
  42435. bool operator==( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  42436. {
  42437. return ( sType == rhs.sType )
  42438. && ( pNext == rhs.pNext )
  42439. && ( coverageReductionMode == rhs.coverageReductionMode )
  42440. && ( rasterizationSamples == rhs.rasterizationSamples )
  42441. && ( depthStencilSamples == rhs.depthStencilSamples )
  42442. && ( colorSamples == rhs.colorSamples );
  42443. }
  42444. bool operator!=( FramebufferMixedSamplesCombinationNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  42445. {
  42446. return !operator==( rhs );
  42447. }
  42448. #endif
  42449. public:
  42450. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferMixedSamplesCombinationNV;
  42451. void* pNext = {};
  42452. VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
  42453. VULKAN_HPP_NAMESPACE::SampleCountFlagBits rasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
  42454. VULKAN_HPP_NAMESPACE::SampleCountFlags depthStencilSamples = {};
  42455. VULKAN_HPP_NAMESPACE::SampleCountFlags colorSamples = {};
  42456. };
  42457. static_assert( sizeof( FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ), "struct and wrapper have different size!" );
  42458. static_assert( std::is_standard_layout<FramebufferMixedSamplesCombinationNV>::value, "struct wrapper is not a standard layout!" );
  42459. template <>
  42460. struct CppType<StructureType, StructureType::eFramebufferMixedSamplesCombinationNV>
  42461. {
  42462. using Type = FramebufferMixedSamplesCombinationNV;
  42463. };
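// Example (illustrative sketch, not generated code): combinations like the struct above are
// reported by the VK_NV_coverage_reduction_mode query; `physicalDevice` is an assumed handle
// and the extension is assumed to be enabled on the instance/device.
//
//   auto combinations = physicalDevice.getSupportedFramebufferMixedSamplesCombinationsNV();
//   for ( vk::FramebufferMixedSamplesCombinationNV const & c : combinations )
//   {
//     // e.g. compare c.rasterizationSamples and c.depthStencilSamples with the desired setup
//   }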
  42464. struct SurfaceCapabilities2EXT
  42465. {
  42466. static const bool allowDuplicate = false;
  42467. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2EXT;
  42468. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42469. VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT(uint32_t minImageCount_ = {}, uint32_t maxImageCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, uint32_t maxImageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters_ = {}) VULKAN_HPP_NOEXCEPT
  42470. : minImageCount( minImageCount_ ), maxImageCount( maxImageCount_ ), currentExtent( currentExtent_ ), minImageExtent( minImageExtent_ ), maxImageExtent( maxImageExtent_ ), maxImageArrayLayers( maxImageArrayLayers_ ), supportedTransforms( supportedTransforms_ ), currentTransform( currentTransform_ ), supportedCompositeAlpha( supportedCompositeAlpha_ ), supportedUsageFlags( supportedUsageFlags_ ), supportedSurfaceCounters( supportedSurfaceCounters_ )
  42471. {}
  42472. VULKAN_HPP_CONSTEXPR SurfaceCapabilities2EXT( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42473. SurfaceCapabilities2EXT( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
  42474. : SurfaceCapabilities2EXT( *reinterpret_cast<SurfaceCapabilities2EXT const *>( &rhs ) )
  42475. {}
  42476. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42477. VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilities2EXT & operator=( SurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42478. SurfaceCapabilities2EXT & operator=( VkSurfaceCapabilities2EXT const & rhs ) VULKAN_HPP_NOEXCEPT
  42479. {
  42480. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT const *>( &rhs );
  42481. return *this;
  42482. }
  42483. operator VkSurfaceCapabilities2EXT const&() const VULKAN_HPP_NOEXCEPT
  42484. {
  42485. return *reinterpret_cast<const VkSurfaceCapabilities2EXT*>( this );
  42486. }
  42487. operator VkSurfaceCapabilities2EXT &() VULKAN_HPP_NOEXCEPT
  42488. {
  42489. return *reinterpret_cast<VkSurfaceCapabilities2EXT*>( this );
  42490. }
  42491. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  42492. auto operator<=>( SurfaceCapabilities2EXT const& ) const = default;
  42493. #else
  42494. bool operator==( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  42495. {
  42496. return ( sType == rhs.sType )
  42497. && ( pNext == rhs.pNext )
  42498. && ( minImageCount == rhs.minImageCount )
  42499. && ( maxImageCount == rhs.maxImageCount )
  42500. && ( currentExtent == rhs.currentExtent )
  42501. && ( minImageExtent == rhs.minImageExtent )
  42502. && ( maxImageExtent == rhs.maxImageExtent )
  42503. && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
  42504. && ( supportedTransforms == rhs.supportedTransforms )
  42505. && ( currentTransform == rhs.currentTransform )
  42506. && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
  42507. && ( supportedUsageFlags == rhs.supportedUsageFlags )
  42508. && ( supportedSurfaceCounters == rhs.supportedSurfaceCounters );
  42509. }
  42510. bool operator!=( SurfaceCapabilities2EXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  42511. {
  42512. return !operator==( rhs );
  42513. }
  42514. #endif
  42515. public:
  42516. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2EXT;
  42517. void* pNext = {};
  42518. uint32_t minImageCount = {};
  42519. uint32_t maxImageCount = {};
  42520. VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
  42521. VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
  42522. VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
  42523. uint32_t maxImageArrayLayers = {};
  42524. VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
  42525. VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
  42526. VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
  42527. VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
  42528. VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT supportedSurfaceCounters = {};
  42529. };
  42530. static_assert( sizeof( SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ), "struct and wrapper have different size!" );
  42531. static_assert( std::is_standard_layout<SurfaceCapabilities2EXT>::value, "struct wrapper is not a standard layout!" );
  42532. template <>
  42533. struct CppType<StructureType, StructureType::eSurfaceCapabilities2EXT>
  42534. {
  42535. using Type = SurfaceCapabilities2EXT;
  42536. };
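// Example (illustrative sketch, not generated code): with VK_EXT_display_surface_counter
// enabled, the structure above is filled by a single call; `surface` is an assumed
// vk::SurfaceKHR.
//
//   vk::SurfaceCapabilities2EXT caps2 = physicalDevice.getSurfaceCapabilities2EXT( surface );
//   bool hasVBlankCounter =
//     static_cast<bool>( caps2.supportedSurfaceCounters & vk::SurfaceCounterFlagBitsEXT::eVblank );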
  42537. struct SurfaceCapabilitiesKHR
  42538. {
  42539. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42540. VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR(uint32_t minImageCount_ = {}, uint32_t maxImageCount_ = {}, VULKAN_HPP_NAMESPACE::Extent2D currentExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D minImageExtent_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent_ = {}, uint32_t maxImageArrayLayers_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags_ = {}) VULKAN_HPP_NOEXCEPT
  42541. : minImageCount( minImageCount_ ), maxImageCount( maxImageCount_ ), currentExtent( currentExtent_ ), minImageExtent( minImageExtent_ ), maxImageExtent( maxImageExtent_ ), maxImageArrayLayers( maxImageArrayLayers_ ), supportedTransforms( supportedTransforms_ ), currentTransform( currentTransform_ ), supportedCompositeAlpha( supportedCompositeAlpha_ ), supportedUsageFlags( supportedUsageFlags_ )
  42542. {}
  42543. VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesKHR( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42544. SurfaceCapabilitiesKHR( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  42545. : SurfaceCapabilitiesKHR( *reinterpret_cast<SurfaceCapabilitiesKHR const *>( &rhs ) )
  42546. {}
  42547. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42548. VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesKHR & operator=( SurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42549. SurfaceCapabilitiesKHR & operator=( VkSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  42550. {
  42551. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR const *>( &rhs );
  42552. return *this;
  42553. }
  42554. operator VkSurfaceCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
  42555. {
  42556. return *reinterpret_cast<const VkSurfaceCapabilitiesKHR*>( this );
  42557. }
  42558. operator VkSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
  42559. {
  42560. return *reinterpret_cast<VkSurfaceCapabilitiesKHR*>( this );
  42561. }
  42562. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  42563. auto operator<=>( SurfaceCapabilitiesKHR const& ) const = default;
  42564. #else
  42565. bool operator==( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  42566. {
  42567. return ( minImageCount == rhs.minImageCount )
  42568. && ( maxImageCount == rhs.maxImageCount )
  42569. && ( currentExtent == rhs.currentExtent )
  42570. && ( minImageExtent == rhs.minImageExtent )
  42571. && ( maxImageExtent == rhs.maxImageExtent )
  42572. && ( maxImageArrayLayers == rhs.maxImageArrayLayers )
  42573. && ( supportedTransforms == rhs.supportedTransforms )
  42574. && ( currentTransform == rhs.currentTransform )
  42575. && ( supportedCompositeAlpha == rhs.supportedCompositeAlpha )
  42576. && ( supportedUsageFlags == rhs.supportedUsageFlags );
  42577. }
  42578. bool operator!=( SurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  42579. {
  42580. return !operator==( rhs );
  42581. }
  42582. #endif
  42583. public:
  42584. uint32_t minImageCount = {};
  42585. uint32_t maxImageCount = {};
  42586. VULKAN_HPP_NAMESPACE::Extent2D currentExtent = {};
  42587. VULKAN_HPP_NAMESPACE::Extent2D minImageExtent = {};
  42588. VULKAN_HPP_NAMESPACE::Extent2D maxImageExtent = {};
  42589. uint32_t maxImageArrayLayers = {};
  42590. VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR supportedTransforms = {};
  42591. VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR currentTransform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
  42592. VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR supportedCompositeAlpha = {};
  42593. VULKAN_HPP_NAMESPACE::ImageUsageFlags supportedUsageFlags = {};
  42594. };
  42595. static_assert( sizeof( SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
  42596. static_assert( std::is_standard_layout<SurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
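// Example (illustrative sketch, not generated code): typical swapchain setup reads these
// capabilities and clamps the requested image count; `physicalDevice` and `surface` are
// assumed, already-created handles.
//
//   vk::SurfaceCapabilitiesKHR caps = physicalDevice.getSurfaceCapabilitiesKHR( surface );
//   uint32_t imageCount = caps.minImageCount + 1;
//   if ( ( caps.maxImageCount > 0 ) && ( imageCount > caps.maxImageCount ) )
//   {
//     imageCount = caps.maxImageCount;   // maxImageCount == 0 means "no upper limit"
//   }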
  42597. struct SurfaceCapabilities2KHR
  42598. {
  42599. static const bool allowDuplicate = false;
  42600. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilities2KHR;
  42601. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42602. VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR(VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities_ = {}) VULKAN_HPP_NOEXCEPT
  42603. : surfaceCapabilities( surfaceCapabilities_ )
  42604. {}
  42605. VULKAN_HPP_CONSTEXPR SurfaceCapabilities2KHR( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42606. SurfaceCapabilities2KHR( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  42607. : SurfaceCapabilities2KHR( *reinterpret_cast<SurfaceCapabilities2KHR const *>( &rhs ) )
  42608. {}
  42609. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42610. VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilities2KHR & operator=( SurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42611. SurfaceCapabilities2KHR & operator=( VkSurfaceCapabilities2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  42612. {
  42613. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR const *>( &rhs );
  42614. return *this;
  42615. }
  42616. operator VkSurfaceCapabilities2KHR const&() const VULKAN_HPP_NOEXCEPT
  42617. {
  42618. return *reinterpret_cast<const VkSurfaceCapabilities2KHR*>( this );
  42619. }
  42620. operator VkSurfaceCapabilities2KHR &() VULKAN_HPP_NOEXCEPT
  42621. {
  42622. return *reinterpret_cast<VkSurfaceCapabilities2KHR*>( this );
  42623. }
  42624. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  42625. auto operator<=>( SurfaceCapabilities2KHR const& ) const = default;
  42626. #else
  42627. bool operator==( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  42628. {
  42629. return ( sType == rhs.sType )
  42630. && ( pNext == rhs.pNext )
  42631. && ( surfaceCapabilities == rhs.surfaceCapabilities );
  42632. }
  42633. bool operator!=( SurfaceCapabilities2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  42634. {
  42635. return !operator==( rhs );
  42636. }
  42637. #endif
  42638. public:
  42639. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilities2KHR;
  42640. void* pNext = {};
  42641. VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities = {};
  42642. };
  42643. static_assert( sizeof( SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ), "struct and wrapper have different size!" );
  42644. static_assert( std::is_standard_layout<SurfaceCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
  42645. template <>
  42646. struct CppType<StructureType, StructureType::eSurfaceCapabilities2KHR>
  42647. {
  42648. using Type = SurfaceCapabilities2KHR;
  42649. };
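// Example (illustrative sketch, not generated code): the extensible variant above is queried
// through VK_KHR_get_surface_capabilities2; extension structures can be chained via pNext.
// `surface` is an assumed vk::SurfaceKHR.
//
//   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo;
//   surfaceInfo.setSurface( surface );
//   vk::SurfaceCapabilities2KHR caps2 = physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo );
//   vk::SurfaceCapabilitiesKHR const & caps = caps2.surfaceCapabilities;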
  42650. struct SurfaceFormatKHR
  42651. {
  42652. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42653. VULKAN_HPP_CONSTEXPR SurfaceFormatKHR(VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined, VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace_ = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear) VULKAN_HPP_NOEXCEPT
  42654. : format( format_ ), colorSpace( colorSpace_ )
  42655. {}
  42656. VULKAN_HPP_CONSTEXPR SurfaceFormatKHR( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42657. SurfaceFormatKHR( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  42658. : SurfaceFormatKHR( *reinterpret_cast<SurfaceFormatKHR const *>( &rhs ) )
  42659. {}
  42660. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42661. VULKAN_HPP_CONSTEXPR_14 SurfaceFormatKHR & operator=( SurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42662. SurfaceFormatKHR & operator=( VkSurfaceFormatKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  42663. {
  42664. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR const *>( &rhs );
  42665. return *this;
  42666. }
  42667. operator VkSurfaceFormatKHR const&() const VULKAN_HPP_NOEXCEPT
  42668. {
  42669. return *reinterpret_cast<const VkSurfaceFormatKHR*>( this );
  42670. }
  42671. operator VkSurfaceFormatKHR &() VULKAN_HPP_NOEXCEPT
  42672. {
  42673. return *reinterpret_cast<VkSurfaceFormatKHR*>( this );
  42674. }
  42675. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  42676. auto operator<=>( SurfaceFormatKHR const& ) const = default;
  42677. #else
  42678. bool operator==( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  42679. {
  42680. return ( format == rhs.format )
  42681. && ( colorSpace == rhs.colorSpace );
  42682. }
  42683. bool operator!=( SurfaceFormatKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  42684. {
  42685. return !operator==( rhs );
  42686. }
  42687. #endif
  42688. public:
  42689. VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  42690. VULKAN_HPP_NAMESPACE::ColorSpaceKHR colorSpace = VULKAN_HPP_NAMESPACE::ColorSpaceKHR::eSrgbNonlinear;
  42691. };
  42692. static_assert( sizeof( SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
  42693. static_assert( std::is_standard_layout<SurfaceFormatKHR>::value, "struct wrapper is not a standard layout!" );
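// Example (illustrative sketch, not generated code): selecting a preferred swapchain format
// from the surface format list, falling back to the first reported entry; `surface` is an
// assumed vk::SurfaceKHR.
//
//   std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
//   vk::SurfaceFormatKHR chosen = formats.front();
//   for ( vk::SurfaceFormatKHR const & f : formats )
//   {
//     if ( ( f.format == vk::Format::eB8G8R8A8Srgb ) &&
//          ( f.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear ) )
//     {
//       chosen = f;
//       break;
//     }
//   }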
  42694. struct SurfaceFormat2KHR
  42695. {
  42696. static const bool allowDuplicate = false;
  42697. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFormat2KHR;
  42698. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42699. VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR(VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat_ = {}) VULKAN_HPP_NOEXCEPT
  42700. : surfaceFormat( surfaceFormat_ )
  42701. {}
  42702. VULKAN_HPP_CONSTEXPR SurfaceFormat2KHR( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42703. SurfaceFormat2KHR( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  42704. : SurfaceFormat2KHR( *reinterpret_cast<SurfaceFormat2KHR const *>( &rhs ) )
  42705. {}
  42706. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42707. VULKAN_HPP_CONSTEXPR_14 SurfaceFormat2KHR & operator=( SurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42708. SurfaceFormat2KHR & operator=( VkSurfaceFormat2KHR const & rhs ) VULKAN_HPP_NOEXCEPT
  42709. {
  42710. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR const *>( &rhs );
  42711. return *this;
  42712. }
  42713. operator VkSurfaceFormat2KHR const&() const VULKAN_HPP_NOEXCEPT
  42714. {
  42715. return *reinterpret_cast<const VkSurfaceFormat2KHR*>( this );
  42716. }
  42717. operator VkSurfaceFormat2KHR &() VULKAN_HPP_NOEXCEPT
  42718. {
  42719. return *reinterpret_cast<VkSurfaceFormat2KHR*>( this );
  42720. }
  42721. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  42722. auto operator<=>( SurfaceFormat2KHR const& ) const = default;
  42723. #else
  42724. bool operator==( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  42725. {
  42726. return ( sType == rhs.sType )
  42727. && ( pNext == rhs.pNext )
  42728. && ( surfaceFormat == rhs.surfaceFormat );
  42729. }
  42730. bool operator!=( SurfaceFormat2KHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  42731. {
  42732. return !operator==( rhs );
  42733. }
  42734. #endif
  42735. public:
  42736. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFormat2KHR;
  42737. void* pNext = {};
  42738. VULKAN_HPP_NAMESPACE::SurfaceFormatKHR surfaceFormat = {};
  42739. };
  42740. static_assert( sizeof( SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" );
  42741. static_assert( std::is_standard_layout<SurfaceFormat2KHR>::value, "struct wrapper is not a standard layout!" );
  42742. template <>
  42743. struct CppType<StructureType, StructureType::eSurfaceFormat2KHR>
  42744. {
  42745. using Type = SurfaceFormat2KHR;
  42746. };
  42747. struct PhysicalDeviceToolPropertiesEXT
  42748. {
  42749. static const bool allowDuplicate = false;
  42750. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceToolPropertiesEXT;
  42751. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42752. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolPropertiesEXT(std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& name_ = {}, std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& version_ = {}, VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes_ = {}, std::array<char,VK_MAX_DESCRIPTION_SIZE> const& description_ = {}, std::array<char,VK_MAX_EXTENSION_NAME_SIZE> const& layer_ = {}) VULKAN_HPP_NOEXCEPT
  42753. : name( name_ ), version( version_ ), purposes( purposes_ ), description( description_ ), layer( layer_ )
  42754. {}
  42755. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolPropertiesEXT( PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42756. PhysicalDeviceToolPropertiesEXT( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  42757. : PhysicalDeviceToolPropertiesEXT( *reinterpret_cast<PhysicalDeviceToolPropertiesEXT const *>( &rhs ) )
  42758. {}
  42759. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  42760. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceToolPropertiesEXT & operator=( PhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  42761. PhysicalDeviceToolPropertiesEXT & operator=( VkPhysicalDeviceToolPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  42762. {
  42763. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT const *>( &rhs );
  42764. return *this;
  42765. }
  42766. operator VkPhysicalDeviceToolPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
  42767. {
  42768. return *reinterpret_cast<const VkPhysicalDeviceToolPropertiesEXT*>( this );
  42769. }
  42770. operator VkPhysicalDeviceToolPropertiesEXT &() VULKAN_HPP_NOEXCEPT
  42771. {
  42772. return *reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT*>( this );
  42773. }
  42774. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  42775. auto operator<=>( PhysicalDeviceToolPropertiesEXT const& ) const = default;
  42776. #else
  42777. bool operator==( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  42778. {
  42779. return ( sType == rhs.sType )
  42780. && ( pNext == rhs.pNext )
  42781. && ( name == rhs.name )
  42782. && ( version == rhs.version )
  42783. && ( purposes == rhs.purposes )
  42784. && ( description == rhs.description )
  42785. && ( layer == rhs.layer );
  42786. }
  42787. bool operator!=( PhysicalDeviceToolPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  42788. {
  42789. return !operator==( rhs );
  42790. }
  42791. #endif
  42792. public:
  42793. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceToolPropertiesEXT;
  42794. void* pNext = {};
  42795. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> name = {};
  42796. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> version = {};
  42797. VULKAN_HPP_NAMESPACE::ToolPurposeFlagsEXT purposes = {};
  42798. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DESCRIPTION_SIZE> description = {};
  42799. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_EXTENSION_NAME_SIZE> layer = {};
  42800. };
  42801. static_assert( sizeof( PhysicalDeviceToolPropertiesEXT ) == sizeof( VkPhysicalDeviceToolPropertiesEXT ), "struct and wrapper have different size!" );
  42802. static_assert( std::is_standard_layout<PhysicalDeviceToolPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  42803. template <>
  42804. struct CppType<StructureType, StructureType::ePhysicalDeviceToolPropertiesEXT>
  42805. {
  42806. using Type = PhysicalDeviceToolPropertiesEXT;
  42807. };
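// Example (illustrative sketch, not generated code): VK_EXT_tooling_info lets an application
// list the tools (validation layers, profilers, etc.) currently attached to the device; the
// char-array members derive from std::array, so .data() yields a C string.
//
//   auto tools = physicalDevice.getToolPropertiesEXT();
//   for ( vk::PhysicalDeviceToolPropertiesEXT const & tool : tools )
//   {
//     std::printf( "%s %s\n", tool.name.data(), tool.version.data() );
//   }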
  42808. #ifndef VULKAN_HPP_NO_SMART_HANDLE
  42809. template <typename Dispatch> class UniqueHandleTraits<Device, Dispatch> { public: using deleter = ObjectDestroy<NoParent, Dispatch>; };
  42810. using UniqueDevice = UniqueHandle<Device, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  42811. #endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  42812. class PhysicalDevice
  42813. {
  42814. public:
  42815. using CType = VkPhysicalDevice;
  42816. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice;
  42817. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice;
  42818. public:
  42819. VULKAN_HPP_CONSTEXPR PhysicalDevice() VULKAN_HPP_NOEXCEPT
  42820. : m_physicalDevice(VK_NULL_HANDLE)
  42821. {}
  42822. VULKAN_HPP_CONSTEXPR PhysicalDevice( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  42823. : m_physicalDevice(VK_NULL_HANDLE)
  42824. {}
  42825. VULKAN_HPP_TYPESAFE_EXPLICIT PhysicalDevice( VkPhysicalDevice physicalDevice ) VULKAN_HPP_NOEXCEPT
  42826. : m_physicalDevice( physicalDevice )
  42827. {}
  42828. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  42829. PhysicalDevice & operator=(VkPhysicalDevice physicalDevice) VULKAN_HPP_NOEXCEPT
  42830. {
  42831. m_physicalDevice = physicalDevice;
  42832. return *this;
  42833. }
  42834. #endif
  42835. PhysicalDevice & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  42836. {
  42837. m_physicalDevice = VK_NULL_HANDLE;
  42838. return *this;
  42839. }
  42840. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  42841. auto operator<=>( PhysicalDevice const& ) const = default;
  42842. #else
  42843. bool operator==( PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
  42844. {
  42845. return m_physicalDevice == rhs.m_physicalDevice;
  42846. }
  42847. bool operator!=(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
  42848. {
  42849. return m_physicalDevice != rhs.m_physicalDevice;
  42850. }
  42851. bool operator<(PhysicalDevice const & rhs ) const VULKAN_HPP_NOEXCEPT
  42852. {
  42853. return m_physicalDevice < rhs.m_physicalDevice;
  42854. }
  42855. #endif
  42856. #ifdef VK_USE_PLATFORM_WIN32_KHR
  42857. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  42858. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42859. VULKAN_HPP_NODISCARD Result acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  42860. #else
  42861. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42862. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42863. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  42864. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  42865. #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
  42866. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42867. VULKAN_HPP_NODISCARD Result acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  42868. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  42869. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42870. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<void>::type acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42871. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  42872. #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
  42873. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42874. VULKAN_HPP_NODISCARD Result createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  42875. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  42876. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42877. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42878. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  42879. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42880. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42881. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  42882. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
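// Example (illustrative sketch, not generated code): creating a logical device with one queue;
// `queueFamilyIndex` is an assumed, previously selected family index. The *Unique variant
// returns a UniqueDevice that destroys the device when it goes out of scope.
//
//   float priority = 1.0f;
//   vk::DeviceQueueCreateInfo queueInfo;
//   queueInfo.setQueueFamilyIndex( queueFamilyIndex ).setQueueCount( 1 ).setPQueuePriorities( &priority );
//   vk::DeviceCreateInfo createInfo;
//   createInfo.setQueueCreateInfoCount( 1 ).setPQueueCreateInfos( &queueInfo );
//   vk::UniqueDevice device = physicalDevice.createDeviceUnique( createInfo );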
  42883. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42884. VULKAN_HPP_NODISCARD Result createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  42885. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  42886. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42887. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42888. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  42889. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42890. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42891. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  42892. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  42893. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42894. VULKAN_HPP_NODISCARD Result enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  42895. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  42896. template <typename ExtensionPropertiesAllocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42897. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42898. template <typename ExtensionPropertiesAllocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = ExtensionPropertiesAllocator, typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type = 0>
  42899. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateDeviceExtensionProperties( Optional<const std::string> layerName, ExtensionPropertiesAllocator & extensionPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42900. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
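// Example (illustrative sketch, not generated code): checking whether a device extension is
// available before enabling it; assumes <algorithm> and <cstring> are included.
//
//   auto extensions = physicalDevice.enumerateDeviceExtensionProperties();
//   bool swapchainSupported = std::any_of( extensions.begin(), extensions.end(),
//     []( vk::ExtensionProperties const & ep )
//     { return std::strcmp( ep.extensionName.data(), VK_KHR_SWAPCHAIN_EXTENSION_NAME ) == 0; } );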
  42901. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42902. VULKAN_HPP_NODISCARD Result enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  42903. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  42904. template <typename LayerPropertiesAllocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42905. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type enumerateDeviceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42906. template <typename LayerPropertiesAllocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = LayerPropertiesAllocator, typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type = 0>
  42907. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42908. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  42909. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42910. VULKAN_HPP_NODISCARD Result enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  42911. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  42912. template <typename Allocator = std::allocator<PerformanceCounterDescriptionKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42913. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const &counters, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42914. template <typename Allocator = std::allocator<PerformanceCounterDescriptionKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Allocator, typename std::enable_if<std::is_same<typename B::value_type, PerformanceCounterDescriptionKHR>::value, int>::type = 0>
  42915. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const &counters, Allocator const& vectorAllocator, Dispatch const &d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42916. template <typename PerformanceCounterKHRAllocator = std::allocator<PerformanceCounterKHR>, typename PerformanceCounterDescriptionKHRAllocator = std::allocator<PerformanceCounterDescriptionKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42917. VULKAN_HPP_NODISCARD typename ResultValueType<std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42918. template <typename PerformanceCounterKHRAllocator = std::allocator<PerformanceCounterKHR>, typename PerformanceCounterDescriptionKHRAllocator = std::allocator<PerformanceCounterDescriptionKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B1 = PerformanceCounterKHRAllocator, typename B2 = PerformanceCounterDescriptionKHRAllocator, typename std::enable_if<std::is_same<typename B1::value_type, PerformanceCounterKHR>::value && std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value, int>::type = 0>
  42919. VULKAN_HPP_NODISCARD typename ResultValueType<std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42920. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
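// Example (illustrative sketch, not generated code): the pair-returning overload above yields
// the performance counters and their descriptions for one queue family in a single call;
// `queueFamilyIndex` is an assumed index.
//
//   auto [counters, descriptions] =
//     physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex );
//   assert( counters.size() == descriptions.size() );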
  42921. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42922. VULKAN_HPP_NODISCARD Result getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  42923. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  42924. template <typename DisplayModeProperties2KHRAllocator = std::allocator<DisplayModeProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42925. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42926. template <typename DisplayModeProperties2KHRAllocator = std::allocator<DisplayModeProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = DisplayModeProperties2KHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayModeProperties2KHR>::value, int>::type = 0>
  42927. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42928. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  42929. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42930. VULKAN_HPP_NODISCARD Result getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  42931. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  42932. template <typename DisplayModePropertiesKHRAllocator = std::allocator<DisplayModePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42933. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42934. template <typename DisplayModePropertiesKHRAllocator = std::allocator<DisplayModePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = DisplayModePropertiesKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayModePropertiesKHR>::value, int>::type = 0>
  42935. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42936. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  42937. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42938. VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  42939. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  42940. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42941. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42942. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  42943. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42944. VULKAN_HPP_NODISCARD Result getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  42945. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  42946. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42947. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42948. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  42949. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42950. VULKAN_HPP_NODISCARD Result getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  42951. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  42952. template <typename DisplayKHRAllocator = std::allocator<DisplayKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42953. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42954. template <typename DisplayKHRAllocator = std::allocator<DisplayKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = DisplayKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayKHR>::value, int>::type = 0>
  42955. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42956. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  42957. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42958. VULKAN_HPP_NODISCARD Result getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  42959. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  42960. template <typename TimeDomainEXTAllocator = std::allocator<TimeDomainEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42961. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type getCalibrateableTimeDomainsEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42962. template <typename TimeDomainEXTAllocator = std::allocator<TimeDomainEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = TimeDomainEXTAllocator, typename std::enable_if<std::is_same<typename B::value_type, TimeDomainEXT>::value, int>::type = 0>
  42963. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42964. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  42965. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42966. VULKAN_HPP_NODISCARD Result getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  42967. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  42968. template <typename CooperativeMatrixPropertiesNVAllocator = std::allocator<CooperativeMatrixPropertiesNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42969. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type getCooperativeMatrixPropertiesNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42970. template <typename CooperativeMatrixPropertiesNVAllocator = std::allocator<CooperativeMatrixPropertiesNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = CooperativeMatrixPropertiesNVAllocator, typename std::enable_if<std::is_same<typename B::value_type, CooperativeMatrixPropertiesNV>::value, int>::type = 0>
  42971. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42972. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  42973. #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
  42974. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42975. Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB* dfb, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  42976. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  42977. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42978. Bool32 getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  42979. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  42980. #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
  42981. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42982. VULKAN_HPP_NODISCARD Result getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  42983. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  42984. template <typename DisplayPlaneProperties2KHRAllocator = std::allocator<DisplayPlaneProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42985. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type getDisplayPlaneProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42986. template <typename DisplayPlaneProperties2KHRAllocator = std::allocator<DisplayPlaneProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = DisplayPlaneProperties2KHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayPlaneProperties2KHR>::value, int>::type = 0>
  42987. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42988. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  42989. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42990. VULKAN_HPP_NODISCARD Result getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  42991. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  42992. template <typename DisplayPlanePropertiesKHRAllocator = std::allocator<DisplayPlanePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42993. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type getDisplayPlanePropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42994. template <typename DisplayPlanePropertiesKHRAllocator = std::allocator<DisplayPlanePropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = DisplayPlanePropertiesKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayPlanePropertiesKHR>::value, int>::type = 0>
  42995. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  42996. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  42997. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  42998. VULKAN_HPP_NODISCARD Result getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  42999. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43000. template <typename DisplayProperties2KHRAllocator = std::allocator<DisplayProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43001. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type getDisplayProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43002. template <typename DisplayProperties2KHRAllocator = std::allocator<DisplayProperties2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = DisplayProperties2KHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayProperties2KHR>::value, int>::type = 0>
  43003. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43004. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43005. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43006. VULKAN_HPP_NODISCARD Result getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43007. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43008. template <typename DisplayPropertiesKHRAllocator = std::allocator<DisplayPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43009. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type getDisplayPropertiesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43010. template <typename DisplayPropertiesKHRAllocator = std::allocator<DisplayPropertiesKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = DisplayPropertiesKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, DisplayPropertiesKHR>::value, int>::type = 0>
  43011. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43012. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
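// Illustrative note (not part of the generated header): the display-property getters above come in
// two flavours -- a C-style two-call enumeration taking a count pointer plus an output array, and
// enhanced-mode overloads that return a std::vector directly and throw vk::SystemError on failure.
// A minimal sketch, assuming the default `vk` namespace, exceptions enabled, and a hypothetical,
// already-acquired vk::PhysicalDevice named `physicalDevice`:
//
//   // C-style two-call pattern (always available)
//   uint32_t count = 0;
//   vk::Result result = physicalDevice.getDisplayPropertiesKHR( &count, nullptr );
//   std::vector<vk::DisplayPropertiesKHR> displays( count );
//   result = physicalDevice.getDisplayPropertiesKHR( &count, displays.data() );
//
//   // enhanced mode: the same data in a single call
//   std::vector<vk::DisplayPropertiesKHR> displays2 = physicalDevice.getDisplayPropertiesKHR();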
  43013. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43014. void getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43015. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43016. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43017. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43018. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43019. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43020. void getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43021. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43022. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43023. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalBufferProperties getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43024. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43025. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43026. void getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43027. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43028. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43029. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43030. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43031. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43032. void getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43033. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43034. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43035. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalFenceProperties getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43036. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43037. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43038. VULKAN_HPP_NODISCARD Result getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43039. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43040. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43041. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43042. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43043. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43044. void getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43045. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43046. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43047. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43048. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43049. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43050. void getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43051. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43052. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43053. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43054. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
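// Illustrative note (not part of the generated header): the getExternal*Properties queries above
// report which external memory / fence / semaphore handle types can be exported or imported. They
// cannot fail, so the enhanced-mode overloads simply return the properties struct by value. A
// minimal sketch, with `physicalDevice` a hypothetical vk::PhysicalDevice:
//
//   vk::PhysicalDeviceExternalBufferInfo info{};
//   info.usage      = vk::BufferUsageFlagBits::eTransferSrc;
//   info.handleType = vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
//   vk::ExternalBufferProperties props = physicalDevice.getExternalBufferProperties( info );
//   bool exportable = static_cast<bool>( props.externalMemoryProperties.externalMemoryFeatures &
//                                        vk::ExternalMemoryFeatureFlagBits::eExportable );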
  43055. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43056. void getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pFeatures, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43057. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43058. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43059. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures getFeatures( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43060. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43061. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43062. void getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43063. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43064. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43065. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43066. template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43067. VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getFeatures2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43068. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43069. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43070. void getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43071. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43072. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43073. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43074. template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43075. VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getFeatures2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43076. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
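// Illustrative note (not part of the generated header): getFeatures2 has a templated overload that
// fills an entire StructureChain in one call, so extension feature structs are queried through
// pNext without manual chaining. A minimal sketch, assuming a Vulkan 1.2 capable driver and a
// hypothetical `physicalDevice`:
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDeviceVulkan12Features>();
//   const auto & core = chain.get<vk::PhysicalDeviceFeatures2>().features;
//   const auto & vk12 = chain.get<vk::PhysicalDeviceVulkan12Features>();
//   bool ok = core.samplerAnisotropy && vk12.descriptorIndexing;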
  43077. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43078. void getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties* pFormatProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43079. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43080. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43081. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43082. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43083. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43084. void getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43085. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43086. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43087. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43088. template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43089. VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43090. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43091. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43092. void getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43093. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43094. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43095. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::FormatProperties2 getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43096. template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43097. VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43098. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
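// Illustrative note (not part of the generated header): getFormatProperties* report per-format
// capabilities for linear tiling, optimal tiling and buffers. A minimal sketch checking whether a
// depth format can be sampled from an optimally tiled image (`physicalDevice` is assumed, not
// defined here):
//
//   vk::FormatProperties fp = physicalDevice.getFormatProperties( vk::Format::eD32Sfloat );
//   bool sampleable = static_cast<bool>( fp.optimalTilingFeatures &
//                                        vk::FormatFeatureFlagBits::eSampledImage );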
  43099. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43100. VULKAN_HPP_NODISCARD Result getFragmentShadingRatesKHR( uint32_t* pFragmentShadingRateCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43101. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43102. template <typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator<PhysicalDeviceFragmentShadingRateKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43103. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type getFragmentShadingRatesKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43104. template <typename PhysicalDeviceFragmentShadingRateKHRAllocator = std::allocator<PhysicalDeviceFragmentShadingRateKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PhysicalDeviceFragmentShadingRateKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceFragmentShadingRateKHR>::value, int>::type = 0>
  43105. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43106. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43107. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43108. VULKAN_HPP_NODISCARD Result getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43109. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43110. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43111. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43112. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43113. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43114. VULKAN_HPP_NODISCARD Result getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43115. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43116. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43117. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43118. template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43119. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43120. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43121. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43122. VULKAN_HPP_NODISCARD Result getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43123. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43124. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43125. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43126. template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43127. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43128. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
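// Illustrative note (not part of the generated header): unlike the plain format-properties
// queries, getImageFormatProperties* can fail (e.g. when the format/usage combination is not
// supported), so the enhanced-mode overloads return ResultValueType and -- with exceptions
// enabled -- throw vk::SystemError. A minimal sketch, assuming a hypothetical `physicalDevice`:
//
//   vk::PhysicalDeviceImageFormatInfo2 query{};
//   query.format = vk::Format::eR8G8B8A8Unorm;
//   query.type   = vk::ImageType::e2D;
//   query.tiling = vk::ImageTiling::eOptimal;
//   query.usage  = vk::ImageUsageFlagBits::eSampled;
//   try
//   {
//     vk::ImageFormatProperties2 props = physicalDevice.getImageFormatProperties2( query );
//     // props.imageFormatProperties.maxExtent / maxMipLevels / sampleCounts are now valid
//   }
//   catch ( vk::SystemError const & )
//   {
//     // the requested format/usage combination is not supported
//   }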
  43129. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43130. void getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43131. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43132. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43133. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties getMemoryProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43134. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43135. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43136. void getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43137. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43138. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43139. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43140. template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43141. VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getMemoryProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43142. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43143. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43144. void getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43145. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43146. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43147. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43148. template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43149. VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getMemoryProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43150. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
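// Illustrative note (not part of the generated header): getMemoryProperties returns the memory
// heaps and memory types of the device; a common use is picking a memory type index for an
// allocation. A minimal sketch of that search, with `memoryTypeBits` (taken from a
// vk::MemoryRequirements query) and `physicalDevice` assumed to exist:
//
//   vk::PhysicalDeviceMemoryProperties mem = physicalDevice.getMemoryProperties();
//   uint32_t index = UINT32_MAX;
//   for ( uint32_t i = 0; i < mem.memoryTypeCount; ++i )
//   {
//     bool allowed     = ( memoryTypeBits & ( 1u << i ) ) != 0;
//     bool hostVisible = static_cast<bool>( mem.memoryTypes[i].propertyFlags &
//                                           vk::MemoryPropertyFlagBits::eHostVisible );
//     if ( allowed && hostVisible ) { index = i; break; }
//   }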
  43151. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43152. void getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43153. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43154. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43155. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43156. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43157. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43158. VULKAN_HPP_NODISCARD Result getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43159. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43160. template <typename Rect2DAllocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43161. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43162. template <typename Rect2DAllocator = std::allocator<Rect2D>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = Rect2DAllocator, typename std::enable_if<std::is_same<typename B::value_type, Rect2D>::value, int>::type = 0>
  43163. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43164. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43165. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43166. void getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43167. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43168. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43169. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties getProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43170. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43171. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43172. void getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43173. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43174. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43175. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43176. template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43177. VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43178. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43179. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43180. void getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43181. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43182. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43183. VULKAN_HPP_NODISCARD VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43184. template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43185. VULKAN_HPP_NODISCARD StructureChain<X, Y, Z...> getProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43186. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43187. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43188. void getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43189. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43190. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43191. VULKAN_HPP_NODISCARD uint32_t getQueueFamilyPerformanceQueryPassesKHR( const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43192. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43193. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43194. void getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43195. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43196. template <typename QueueFamilyPropertiesAllocator = std::allocator<QueueFamilyProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43197. VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> getQueueFamilyProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43198. template <typename QueueFamilyPropertiesAllocator = std::allocator<QueueFamilyProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = QueueFamilyPropertiesAllocator, typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties>::value, int>::type = 0>
  43199. VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43200. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
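// Illustrative note (not part of the generated header): getQueueFamilyProperties cannot fail, so
// its enhanced-mode overload returns the vector directly (no ResultValueType wrapper). A minimal
// sketch locating a graphics-capable queue family on a hypothetical `physicalDevice`:
//
//   std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
//   uint32_t graphicsFamily = UINT32_MAX;
//   for ( uint32_t i = 0; i < families.size(); ++i )
//   {
//     if ( families[i].queueFlags & vk::QueueFlagBits::eGraphics )
//     {
//       graphicsFamily = i;
//       break;
//     }
//   }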
  43201. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43202. void getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43203. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43204. template <typename QueueFamilyProperties2Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43205. VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43206. template <typename QueueFamilyProperties2Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = QueueFamilyProperties2Allocator, typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type = 0>
  43207. VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43208. template <typename StructureChain, typename StructureChainAllocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43209. VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43210. template <typename StructureChain, typename StructureChainAllocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = StructureChainAllocator, typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type = 0>
  43211. VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43212. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43213. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43214. void getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43215. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43216. template <typename QueueFamilyProperties2Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43217. VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43218. template <typename QueueFamilyProperties2Allocator = std::allocator<QueueFamilyProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = QueueFamilyProperties2Allocator, typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type = 0>
  43219. VULKAN_HPP_NODISCARD std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43220. template <typename StructureChain, typename StructureChainAllocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43221. VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2KHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43222. template <typename StructureChain, typename StructureChainAllocator = std::allocator<StructureChain>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = StructureChainAllocator, typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type = 0>
  43223. VULKAN_HPP_NODISCARD std::vector<StructureChain, StructureChainAllocator> getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43224. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43225. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43226. void getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43227. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43228. template <typename SparseImageFormatPropertiesAllocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43229. VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43230. template <typename SparseImageFormatPropertiesAllocator = std::allocator<SparseImageFormatProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = SparseImageFormatPropertiesAllocator, typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties>::value, int>::type = 0>
  43231. VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43232. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43233. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43234. void getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43235. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43236. template <typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43237. VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43238. template <typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = SparseImageFormatProperties2Allocator, typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type = 0>
  43239. VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43240. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43241. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43242. void getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43243. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43244. template <typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43245. VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43246. template <typename SparseImageFormatProperties2Allocator = std::allocator<SparseImageFormatProperties2>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = SparseImageFormatProperties2Allocator, typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type = 0>
  43247. VULKAN_HPP_NODISCARD std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43248. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43249. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43250. VULKAN_HPP_NODISCARD Result getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43251. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43252. template <typename FramebufferMixedSamplesCombinationNVAllocator = std::allocator<FramebufferMixedSamplesCombinationNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43253. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43254. template <typename FramebufferMixedSamplesCombinationNVAllocator = std::allocator<FramebufferMixedSamplesCombinationNV>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = FramebufferMixedSamplesCombinationNVAllocator, typename std::enable_if<std::is_same<typename B::value_type, FramebufferMixedSamplesCombinationNV>::value, int>::type = 0>
  43255. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type getSupportedFramebufferMixedSamplesCombinationsNV( FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43256. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43257. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43258. VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43259. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43260. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43261. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43262. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43263. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43264. VULKAN_HPP_NODISCARD Result getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43265. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43266. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43267. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43268. template <typename X, typename Y, typename... Z, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43269. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<StructureChain<X, Y, Z...>>::type getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43270. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43271. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43272. VULKAN_HPP_NODISCARD Result getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43273. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43274. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43275. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43276. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43277. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43278. VULKAN_HPP_NODISCARD Result getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43279. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43280. template <typename SurfaceFormat2KHRAllocator = std::allocator<SurfaceFormat2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43281. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43282. template <typename SurfaceFormat2KHRAllocator = std::allocator<SurfaceFormat2KHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = SurfaceFormat2KHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormat2KHR>::value, int>::type = 0>
  43283. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43284. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43285. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43286. VULKAN_HPP_NODISCARD Result getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43287. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43288. template <typename SurfaceFormatKHRAllocator = std::allocator<SurfaceFormatKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43289. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43290. template <typename SurfaceFormatKHRAllocator = std::allocator<SurfaceFormatKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = SurfaceFormatKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormatKHR>::value, int>::type = 0>
  43291. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43292. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43293. #ifdef VK_USE_PLATFORM_WIN32_KHR
  43294. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43295. VULKAN_HPP_NODISCARD Result getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43296. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43297. template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43298. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43299. template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PresentModeKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type = 0>
  43300. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, PresentModeKHRAllocator & presentModeKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43301. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43302. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  43303. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43304. VULKAN_HPP_NODISCARD Result getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43305. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43306. template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43307. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43308. template <typename PresentModeKHRAllocator = std::allocator<PresentModeKHR>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PresentModeKHRAllocator, typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type = 0>
  43309. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, PresentModeKHRAllocator & presentModeKHRAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43310. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43311. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43312. VULKAN_HPP_NODISCARD Result getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43313. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43314. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43315. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43316. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
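// Illustrative note (not part of the generated header): the surface queries above
// (getSurfaceCapabilitiesKHR, getSurfaceFormatsKHR, getSurfacePresentModesKHR and
// getSurfaceSupportKHR) are the usual inputs to swapchain creation. A minimal sketch, assuming a
// hypothetical vk::SurfaceKHR `surface`, a queue family index `graphicsFamily`, exceptions
// enabled, and the default `vk` namespace:
//
//   vk::Bool32 presentable                    = physicalDevice.getSurfaceSupportKHR( graphicsFamily, surface );
//   vk::SurfaceCapabilitiesKHR caps           = physicalDevice.getSurfaceCapabilitiesKHR( surface );
//   std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );
//   std::vector<vk::PresentModeKHR> modes     = physicalDevice.getSurfacePresentModesKHR( surface );
//   // pick caps.currentExtent, one entry of `formats` and one of `modes` when filling
//   // vk::SwapchainCreateInfoKHR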
  43317. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43318. VULKAN_HPP_NODISCARD Result getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43319. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43320. template <typename PhysicalDeviceToolPropertiesEXTAllocator = std::allocator<PhysicalDeviceToolPropertiesEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43321. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator>>::type getToolPropertiesEXT( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43322. template <typename PhysicalDeviceToolPropertiesEXTAllocator = std::allocator<PhysicalDeviceToolPropertiesEXT>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PhysicalDeviceToolPropertiesEXTAllocator, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceToolPropertiesEXT>::value, int>::type = 0>
  43323. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator>>::type getToolPropertiesEXT( PhysicalDeviceToolPropertiesEXTAllocator & physicalDeviceToolPropertiesEXTAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43324. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
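// Illustrative note (not part of the generated header): getToolPropertiesEXT (VK_EXT_tooling_info)
// lists tools currently attached to the device, such as validation or capture layers. A minimal
// sketch:
//
//   for ( vk::PhysicalDeviceToolPropertiesEXT const & tool : physicalDevice.getToolPropertiesEXT() )
//   {
//     // tool.name and tool.purposes describe the attached tool
//   }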
  43325. #ifdef VK_USE_PLATFORM_WAYLAND_KHR
  43326. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43327. Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43328. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43329. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43330. Bool32 getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43331. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43332. #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
  43333. #ifdef VK_USE_PLATFORM_WIN32_KHR
  43334. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43335. Bool32 getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43336. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  43337. #ifdef VK_USE_PLATFORM_XCB_KHR
  43338. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43339. Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43340. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43341. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43342. Bool32 getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43343. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43344. #endif /*VK_USE_PLATFORM_XCB_KHR*/
  43345. #ifdef VK_USE_PLATFORM_XLIB_KHR
  43346. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43347. Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43348. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43349. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43350. Bool32 getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43351. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43352. #endif /*VK_USE_PLATFORM_XLIB_KHR*/
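// Illustrative note (not part of the generated header): the platform-guarded queries above answer
// "can this queue family present to this windowing system?" before any surface exists, and are
// only declared when the matching VK_USE_PLATFORM_* macro is defined. A minimal sketch for the
// Wayland case, where `display`, `graphicsFamily` and `physicalDevice` are placeholders:
//
//   #ifdef VK_USE_PLATFORM_WAYLAND_KHR
//   vk::Bool32 canPresent = physicalDevice.getWaylandPresentationSupportKHR( graphicsFamily, *display );
//   #endif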
  43353. #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
  43354. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43355. VULKAN_HPP_NODISCARD Result getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43356. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43357. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43358. typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43359. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  43360. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43361. VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43362. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  43363. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43364. #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
  43365. #ifdef VK_USE_PLATFORM_WIN32_KHR
  43366. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43367. VULKAN_HPP_NODISCARD Result getWinrtDisplayNV( uint32_t deviceRelativeId, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43368. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43369. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43370. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43371. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  43372. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43373. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43374. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  43375. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43376. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  43377. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  43378. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43379. Result releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  43380. #else
  43381. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  43382. typename ResultValueType<void>::type releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  43383. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  43384. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkPhysicalDevice() const VULKAN_HPP_NOEXCEPT
  43385. {
  43386. return m_physicalDevice;
  43387. }
  43388. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  43389. {
  43390. return m_physicalDevice != VK_NULL_HANDLE;
  43391. }
  43392. bool operator!() const VULKAN_HPP_NOEXCEPT
  43393. {
  43394. return m_physicalDevice == VK_NULL_HANDLE;
  43395. }
  43396. private:
  43397. VkPhysicalDevice m_physicalDevice;
  43398. };
  43399. static_assert( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" );
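// Illustrative note (not part of the generated header): because the wrapper and VkPhysicalDevice
// are layout-compatible (see the static_assert above) and the class provides an explicit
// conversion operator, a vk::PhysicalDevice can be handed to plain C Vulkan entry points. A
// minimal sketch, assuming the C entry point is available (statically linked or loaded):
//
//   VkPhysicalDevice raw = static_cast<VkPhysicalDevice>( physicalDevice );
//   VkPhysicalDeviceProperties cProps;
//   vkGetPhysicalDeviceProperties( raw, &cProps );
//   if ( physicalDevice )   // explicit operator bool: handle is non-null
//   { /* ... */ }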
  43400. template <>
  43401. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::ePhysicalDevice>
  43402. {
  43403. using type = VULKAN_HPP_NAMESPACE::PhysicalDevice;
  43404. };
  43405. template <>
  43406. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::ePhysicalDevice>
  43407. {
  43408. using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice;
  43409. };
  43410. template <>
  43411. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::ePhysicalDevice>
  43412. {
  43413. using Type = VULKAN_HPP_NAMESPACE::PhysicalDevice;
  43414. };
  43415. template <>
  43416. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::PhysicalDevice>
  43417. {
  43418. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  43419. };
  43420. struct DeviceGroupDeviceCreateInfo
  43421. {
  43422. static const bool allowDuplicate = false;
  43423. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupDeviceCreateInfo;
  43424. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  43425. VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo(uint32_t physicalDeviceCount_ = {}, const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ = {}) VULKAN_HPP_NOEXCEPT
  43426. : physicalDeviceCount( physicalDeviceCount_ ), pPhysicalDevices( pPhysicalDevices_ )
  43427. {}
  43428. VULKAN_HPP_CONSTEXPR DeviceGroupDeviceCreateInfo( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  43429. DeviceGroupDeviceCreateInfo( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  43430. : DeviceGroupDeviceCreateInfo( *reinterpret_cast<DeviceGroupDeviceCreateInfo const *>( &rhs ) )
  43431. {}
  43432. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43433. DeviceGroupDeviceCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_ )
  43434. : physicalDeviceCount( static_cast<uint32_t>( physicalDevices_.size() ) ), pPhysicalDevices( physicalDevices_.data() )
  43435. {}
  43436. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43437. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  43438. VULKAN_HPP_CONSTEXPR_14 DeviceGroupDeviceCreateInfo & operator=( DeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  43439. DeviceGroupDeviceCreateInfo & operator=( VkDeviceGroupDeviceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  43440. {
  43441. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo const *>( &rhs );
  43442. return *this;
  43443. }
  43444. DeviceGroupDeviceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  43445. {
  43446. pNext = pNext_;
  43447. return *this;
  43448. }
  43449. DeviceGroupDeviceCreateInfo & setPhysicalDeviceCount( uint32_t physicalDeviceCount_ ) VULKAN_HPP_NOEXCEPT
  43450. {
  43451. physicalDeviceCount = physicalDeviceCount_;
  43452. return *this;
  43453. }
  43454. DeviceGroupDeviceCreateInfo & setPPhysicalDevices( const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices_ ) VULKAN_HPP_NOEXCEPT
  43455. {
  43456. pPhysicalDevices = pPhysicalDevices_;
  43457. return *this;
  43458. }
  43459. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43460. DeviceGroupDeviceCreateInfo & setPhysicalDevices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PhysicalDevice> const & physicalDevices_ ) VULKAN_HPP_NOEXCEPT
  43461. {
  43462. physicalDeviceCount = static_cast<uint32_t>( physicalDevices_.size() );
  43463. pPhysicalDevices = physicalDevices_.data();
  43464. return *this;
  43465. }
  43466. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43467. operator VkDeviceGroupDeviceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  43468. {
  43469. return *reinterpret_cast<const VkDeviceGroupDeviceCreateInfo*>( this );
  43470. }
  43471. operator VkDeviceGroupDeviceCreateInfo &() VULKAN_HPP_NOEXCEPT
  43472. {
  43473. return *reinterpret_cast<VkDeviceGroupDeviceCreateInfo*>( this );
  43474. }
  43475. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  43476. auto operator<=>( DeviceGroupDeviceCreateInfo const& ) const = default;
  43477. #else
  43478. bool operator==( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  43479. {
  43480. return ( sType == rhs.sType )
  43481. && ( pNext == rhs.pNext )
  43482. && ( physicalDeviceCount == rhs.physicalDeviceCount )
  43483. && ( pPhysicalDevices == rhs.pPhysicalDevices );
  43484. }
  43485. bool operator!=( DeviceGroupDeviceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  43486. {
  43487. return !operator==( rhs );
  43488. }
  43489. #endif
  43490. public:
  43491. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupDeviceCreateInfo;
  43492. const void* pNext = {};
  43493. uint32_t physicalDeviceCount = {};
  43494. const VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices = {};
  43495. };
  43496. static_assert( sizeof( DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ), "struct and wrapper have different size!" );
  43497. static_assert( std::is_standard_layout<DeviceGroupDeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
  43498. template <>
  43499. struct CppType<StructureType, StructureType::eDeviceGroupDeviceCreateInfo>
  43500. {
  43501. using Type = DeviceGroupDeviceCreateInfo;
  43502. };
  43503. using DeviceGroupDeviceCreateInfoKHR = DeviceGroupDeviceCreateInfo;
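// Usage sketch (editorial comment, not part of the generated header): creating one
// logical device from a device group. "physicalDevices" (a std::vector<vk::PhysicalDevice>
// taken from one group reported by the instance) and "deviceCreateInfo" are assumptions.
//
//   vk::DeviceGroupDeviceCreateInfo deviceGroupInfo( physicalDevices );   // enhanced-mode ArrayProxy constructor
//   deviceCreateInfo.setPNext( &deviceGroupInfo );                        // chain into VkDeviceCreateInfo
//   vk::Device device = physicalDevices.front().createDevice( deviceCreateInfo );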
  43504. struct DeviceGroupPresentInfoKHR
  43505. {
  43506. static const bool allowDuplicate = false;
  43507. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupPresentInfoKHR;
  43508. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  43509. VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR(uint32_t swapchainCount_ = {}, const uint32_t* pDeviceMasks_ = {}, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal) VULKAN_HPP_NOEXCEPT
  43510. : swapchainCount( swapchainCount_ ), pDeviceMasks( pDeviceMasks_ ), mode( mode_ )
  43511. {}
  43512. VULKAN_HPP_CONSTEXPR DeviceGroupPresentInfoKHR( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  43513. DeviceGroupPresentInfoKHR( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  43514. : DeviceGroupPresentInfoKHR( *reinterpret_cast<DeviceGroupPresentInfoKHR const *>( &rhs ) )
  43515. {}
  43516. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43517. DeviceGroupPresentInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal )
  43518. : swapchainCount( static_cast<uint32_t>( deviceMasks_.size() ) ), pDeviceMasks( deviceMasks_.data() ), mode( mode_ )
  43519. {}
  43520. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43521. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  43522. VULKAN_HPP_CONSTEXPR_14 DeviceGroupPresentInfoKHR & operator=( DeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  43523. DeviceGroupPresentInfoKHR & operator=( VkDeviceGroupPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  43524. {
  43525. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR const *>( &rhs );
  43526. return *this;
  43527. }
  43528. DeviceGroupPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  43529. {
  43530. pNext = pNext_;
  43531. return *this;
  43532. }
  43533. DeviceGroupPresentInfoKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
  43534. {
  43535. swapchainCount = swapchainCount_;
  43536. return *this;
  43537. }
  43538. DeviceGroupPresentInfoKHR & setPDeviceMasks( const uint32_t* pDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
  43539. {
  43540. pDeviceMasks = pDeviceMasks_;
  43541. return *this;
  43542. }
  43543. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43544. DeviceGroupPresentInfoKHR & setDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & deviceMasks_ ) VULKAN_HPP_NOEXCEPT
  43545. {
  43546. swapchainCount = static_cast<uint32_t>( deviceMasks_.size() );
  43547. pDeviceMasks = deviceMasks_.data();
  43548. return *this;
  43549. }
  43550. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43551. DeviceGroupPresentInfoKHR & setMode( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode_ ) VULKAN_HPP_NOEXCEPT
  43552. {
  43553. mode = mode_;
  43554. return *this;
  43555. }
  43556. operator VkDeviceGroupPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  43557. {
  43558. return *reinterpret_cast<const VkDeviceGroupPresentInfoKHR*>( this );
  43559. }
  43560. operator VkDeviceGroupPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
  43561. {
  43562. return *reinterpret_cast<VkDeviceGroupPresentInfoKHR*>( this );
  43563. }
  43564. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  43565. auto operator<=>( DeviceGroupPresentInfoKHR const& ) const = default;
  43566. #else
  43567. bool operator==( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  43568. {
  43569. return ( sType == rhs.sType )
  43570. && ( pNext == rhs.pNext )
  43571. && ( swapchainCount == rhs.swapchainCount )
  43572. && ( pDeviceMasks == rhs.pDeviceMasks )
  43573. && ( mode == rhs.mode );
  43574. }
  43575. bool operator!=( DeviceGroupPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  43576. {
  43577. return !operator==( rhs );
  43578. }
  43579. #endif
  43580. public:
  43581. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupPresentInfoKHR;
  43582. const void* pNext = {};
  43583. uint32_t swapchainCount = {};
  43584. const uint32_t* pDeviceMasks = {};
  43585. VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR mode = VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR::eLocal;
  43586. };
  43587. static_assert( sizeof( DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ), "struct and wrapper have different size!" );
  43588. static_assert( std::is_standard_layout<DeviceGroupPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
  43589. template <>
  43590. struct CppType<StructureType, StructureType::eDeviceGroupPresentInfoKHR>
  43591. {
  43592. using Type = DeviceGroupPresentInfoKHR;
  43593. };
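// Usage sketch (editorial comment, not part of the generated header): selecting which
// devices in the group present each swapchain image. "deviceMasks" (one mask per
// swapchain entry in the present) and "presentInfo" are assumed caller-side variables.
//
//   vk::DeviceGroupPresentInfoKHR deviceGroupPresentInfo( deviceMasks, vk::DeviceGroupPresentModeFlagBitsKHR::eLocal );
//   presentInfo.setPNext( &deviceGroupPresentInfo );   // chain into VkPresentInfoKHR before presenting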
  43594. struct DeviceGroupRenderPassBeginInfo
  43595. {
  43596. static const bool allowDuplicate = false;
  43597. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupRenderPassBeginInfo;
  43598. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  43599. VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo(uint32_t deviceMask_ = {}, uint32_t deviceRenderAreaCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ = {}) VULKAN_HPP_NOEXCEPT
  43600. : deviceMask( deviceMask_ ), deviceRenderAreaCount( deviceRenderAreaCount_ ), pDeviceRenderAreas( pDeviceRenderAreas_ )
  43601. {}
  43602. VULKAN_HPP_CONSTEXPR DeviceGroupRenderPassBeginInfo( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  43603. DeviceGroupRenderPassBeginInfo( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  43604. : DeviceGroupRenderPassBeginInfo( *reinterpret_cast<DeviceGroupRenderPassBeginInfo const *>( &rhs ) )
  43605. {}
  43606. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43607. DeviceGroupRenderPassBeginInfo( uint32_t deviceMask_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_ )
  43608. : deviceMask( deviceMask_ ), deviceRenderAreaCount( static_cast<uint32_t>( deviceRenderAreas_.size() ) ), pDeviceRenderAreas( deviceRenderAreas_.data() )
  43609. {}
  43610. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43611. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  43612. VULKAN_HPP_CONSTEXPR_14 DeviceGroupRenderPassBeginInfo & operator=( DeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  43613. DeviceGroupRenderPassBeginInfo & operator=( VkDeviceGroupRenderPassBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  43614. {
  43615. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo const *>( &rhs );
  43616. return *this;
  43617. }
  43618. DeviceGroupRenderPassBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  43619. {
  43620. pNext = pNext_;
  43621. return *this;
  43622. }
  43623. DeviceGroupRenderPassBeginInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
  43624. {
  43625. deviceMask = deviceMask_;
  43626. return *this;
  43627. }
  43628. DeviceGroupRenderPassBeginInfo & setDeviceRenderAreaCount( uint32_t deviceRenderAreaCount_ ) VULKAN_HPP_NOEXCEPT
  43629. {
  43630. deviceRenderAreaCount = deviceRenderAreaCount_;
  43631. return *this;
  43632. }
  43633. DeviceGroupRenderPassBeginInfo & setPDeviceRenderAreas( const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
  43634. {
  43635. pDeviceRenderAreas = pDeviceRenderAreas_;
  43636. return *this;
  43637. }
  43638. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43639. DeviceGroupRenderPassBeginInfo & setDeviceRenderAreas( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & deviceRenderAreas_ ) VULKAN_HPP_NOEXCEPT
  43640. {
  43641. deviceRenderAreaCount = static_cast<uint32_t>( deviceRenderAreas_.size() );
  43642. pDeviceRenderAreas = deviceRenderAreas_.data();
  43643. return *this;
  43644. }
  43645. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43646. operator VkDeviceGroupRenderPassBeginInfo const&() const VULKAN_HPP_NOEXCEPT
  43647. {
  43648. return *reinterpret_cast<const VkDeviceGroupRenderPassBeginInfo*>( this );
  43649. }
  43650. operator VkDeviceGroupRenderPassBeginInfo &() VULKAN_HPP_NOEXCEPT
  43651. {
  43652. return *reinterpret_cast<VkDeviceGroupRenderPassBeginInfo*>( this );
  43653. }
  43654. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  43655. auto operator<=>( DeviceGroupRenderPassBeginInfo const& ) const = default;
  43656. #else
  43657. bool operator==( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  43658. {
  43659. return ( sType == rhs.sType )
  43660. && ( pNext == rhs.pNext )
  43661. && ( deviceMask == rhs.deviceMask )
  43662. && ( deviceRenderAreaCount == rhs.deviceRenderAreaCount )
  43663. && ( pDeviceRenderAreas == rhs.pDeviceRenderAreas );
  43664. }
  43665. bool operator!=( DeviceGroupRenderPassBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  43666. {
  43667. return !operator==( rhs );
  43668. }
  43669. #endif
  43670. public:
  43671. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupRenderPassBeginInfo;
  43672. const void* pNext = {};
  43673. uint32_t deviceMask = {};
  43674. uint32_t deviceRenderAreaCount = {};
  43675. const VULKAN_HPP_NAMESPACE::Rect2D* pDeviceRenderAreas = {};
  43676. };
  43677. static_assert( sizeof( DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ), "struct and wrapper have different size!" );
  43678. static_assert( std::is_standard_layout<DeviceGroupRenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
  43679. template <>
  43680. struct CppType<StructureType, StructureType::eDeviceGroupRenderPassBeginInfo>
  43681. {
  43682. using Type = DeviceGroupRenderPassBeginInfo;
  43683. };
  43684. using DeviceGroupRenderPassBeginInfoKHR = DeviceGroupRenderPassBeginInfo;
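// Usage sketch (editorial comment, not part of the generated header): restricting a
// render pass to devices 0 and 1 of the group with per-device render areas.
// "renderAreas" (std::vector<vk::Rect2D>) and "renderPassBeginInfo" are assumptions.
//
//   vk::DeviceGroupRenderPassBeginInfo deviceGroupBeginInfo( 0x3u /* devices 0 and 1 */, renderAreas );
//   renderPassBeginInfo.setPNext( &deviceGroupBeginInfo );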
  43685. struct DeviceGroupSubmitInfo
  43686. {
  43687. static const bool allowDuplicate = false;
  43688. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSubmitInfo;
  43689. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  43690. VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo(uint32_t waitSemaphoreCount_ = {}, const uint32_t* pWaitSemaphoreDeviceIndices_ = {}, uint32_t commandBufferCount_ = {}, const uint32_t* pCommandBufferDeviceMasks_ = {}, uint32_t signalSemaphoreCount_ = {}, const uint32_t* pSignalSemaphoreDeviceIndices_ = {}) VULKAN_HPP_NOEXCEPT
  43691. : waitSemaphoreCount( waitSemaphoreCount_ ), pWaitSemaphoreDeviceIndices( pWaitSemaphoreDeviceIndices_ ), commandBufferCount( commandBufferCount_ ), pCommandBufferDeviceMasks( pCommandBufferDeviceMasks_ ), signalSemaphoreCount( signalSemaphoreCount_ ), pSignalSemaphoreDeviceIndices( pSignalSemaphoreDeviceIndices_ )
  43692. {}
  43693. VULKAN_HPP_CONSTEXPR DeviceGroupSubmitInfo( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  43694. DeviceGroupSubmitInfo( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  43695. : DeviceGroupSubmitInfo( *reinterpret_cast<DeviceGroupSubmitInfo const *>( &rhs ) )
  43696. {}
  43697. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43698. DeviceGroupSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ = {} )
  43699. : waitSemaphoreCount( static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() ) ), pWaitSemaphoreDeviceIndices( waitSemaphoreDeviceIndices_.data() ), commandBufferCount( static_cast<uint32_t>( commandBufferDeviceMasks_.size() ) ), pCommandBufferDeviceMasks( commandBufferDeviceMasks_.data() ), signalSemaphoreCount( static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() ) ), pSignalSemaphoreDeviceIndices( signalSemaphoreDeviceIndices_.data() )
  43700. {}
  43701. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43702. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  43703. VULKAN_HPP_CONSTEXPR_14 DeviceGroupSubmitInfo & operator=( DeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  43704. DeviceGroupSubmitInfo & operator=( VkDeviceGroupSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  43705. {
  43706. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo const *>( &rhs );
  43707. return *this;
  43708. }
  43709. DeviceGroupSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  43710. {
  43711. pNext = pNext_;
  43712. return *this;
  43713. }
  43714. DeviceGroupSubmitInfo & setWaitSemaphoreCount( uint32_t waitSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
  43715. {
  43716. waitSemaphoreCount = waitSemaphoreCount_;
  43717. return *this;
  43718. }
  43719. DeviceGroupSubmitInfo & setPWaitSemaphoreDeviceIndices( const uint32_t* pWaitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
  43720. {
  43721. pWaitSemaphoreDeviceIndices = pWaitSemaphoreDeviceIndices_;
  43722. return *this;
  43723. }
  43724. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43725. DeviceGroupSubmitInfo & setWaitSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & waitSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
  43726. {
  43727. waitSemaphoreCount = static_cast<uint32_t>( waitSemaphoreDeviceIndices_.size() );
  43728. pWaitSemaphoreDeviceIndices = waitSemaphoreDeviceIndices_.data();
  43729. return *this;
  43730. }
  43731. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43732. DeviceGroupSubmitInfo & setCommandBufferCount( uint32_t commandBufferCount_ ) VULKAN_HPP_NOEXCEPT
  43733. {
  43734. commandBufferCount = commandBufferCount_;
  43735. return *this;
  43736. }
  43737. DeviceGroupSubmitInfo & setPCommandBufferDeviceMasks( const uint32_t* pCommandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
  43738. {
  43739. pCommandBufferDeviceMasks = pCommandBufferDeviceMasks_;
  43740. return *this;
  43741. }
  43742. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43743. DeviceGroupSubmitInfo & setCommandBufferDeviceMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & commandBufferDeviceMasks_ ) VULKAN_HPP_NOEXCEPT
  43744. {
  43745. commandBufferCount = static_cast<uint32_t>( commandBufferDeviceMasks_.size() );
  43746. pCommandBufferDeviceMasks = commandBufferDeviceMasks_.data();
  43747. return *this;
  43748. }
  43749. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43750. DeviceGroupSubmitInfo & setSignalSemaphoreCount( uint32_t signalSemaphoreCount_ ) VULKAN_HPP_NOEXCEPT
  43751. {
  43752. signalSemaphoreCount = signalSemaphoreCount_;
  43753. return *this;
  43754. }
  43755. DeviceGroupSubmitInfo & setPSignalSemaphoreDeviceIndices( const uint32_t* pSignalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
  43756. {
  43757. pSignalSemaphoreDeviceIndices = pSignalSemaphoreDeviceIndices_;
  43758. return *this;
  43759. }
  43760. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43761. DeviceGroupSubmitInfo & setSignalSemaphoreDeviceIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & signalSemaphoreDeviceIndices_ ) VULKAN_HPP_NOEXCEPT
  43762. {
  43763. signalSemaphoreCount = static_cast<uint32_t>( signalSemaphoreDeviceIndices_.size() );
  43764. pSignalSemaphoreDeviceIndices = signalSemaphoreDeviceIndices_.data();
  43765. return *this;
  43766. }
  43767. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  43768. operator VkDeviceGroupSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
  43769. {
  43770. return *reinterpret_cast<const VkDeviceGroupSubmitInfo*>( this );
  43771. }
  43772. operator VkDeviceGroupSubmitInfo &() VULKAN_HPP_NOEXCEPT
  43773. {
  43774. return *reinterpret_cast<VkDeviceGroupSubmitInfo*>( this );
  43775. }
  43776. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  43777. auto operator<=>( DeviceGroupSubmitInfo const& ) const = default;
  43778. #else
  43779. bool operator==( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  43780. {
  43781. return ( sType == rhs.sType )
  43782. && ( pNext == rhs.pNext )
  43783. && ( waitSemaphoreCount == rhs.waitSemaphoreCount )
  43784. && ( pWaitSemaphoreDeviceIndices == rhs.pWaitSemaphoreDeviceIndices )
  43785. && ( commandBufferCount == rhs.commandBufferCount )
  43786. && ( pCommandBufferDeviceMasks == rhs.pCommandBufferDeviceMasks )
  43787. && ( signalSemaphoreCount == rhs.signalSemaphoreCount )
  43788. && ( pSignalSemaphoreDeviceIndices == rhs.pSignalSemaphoreDeviceIndices );
  43789. }
  43790. bool operator!=( DeviceGroupSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  43791. {
  43792. return !operator==( rhs );
  43793. }
  43794. #endif
  43795. public:
  43796. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSubmitInfo;
  43797. const void* pNext = {};
  43798. uint32_t waitSemaphoreCount = {};
  43799. const uint32_t* pWaitSemaphoreDeviceIndices = {};
  43800. uint32_t commandBufferCount = {};
  43801. const uint32_t* pCommandBufferDeviceMasks = {};
  43802. uint32_t signalSemaphoreCount = {};
  43803. const uint32_t* pSignalSemaphoreDeviceIndices = {};
  43804. };
  43805. static_assert( sizeof( DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ), "struct and wrapper have different size!" );
  43806. static_assert( std::is_standard_layout<DeviceGroupSubmitInfo>::value, "struct wrapper is not a standard layout!" );
  43807. template <>
  43808. struct CppType<StructureType, StructureType::eDeviceGroupSubmitInfo>
  43809. {
  43810. using Type = DeviceGroupSubmitInfo;
  43811. };
  43812. using DeviceGroupSubmitInfoKHR = DeviceGroupSubmitInfo;
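// Usage sketch (editorial comment, not part of the generated header): per-device
// routing of a queue submission. The three vectors are assumed to match the
// wait-semaphore, command-buffer, and signal-semaphore counts of "submitInfo".
//
//   vk::DeviceGroupSubmitInfo deviceGroupSubmitInfo( waitDeviceIndices, commandBufferDeviceMasks, signalDeviceIndices );
//   submitInfo.setPNext( &deviceGroupSubmitInfo );   // chain into VkSubmitInfo before submitting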
  43813. struct DeviceGroupSwapchainCreateInfoKHR
  43814. {
  43815. static const bool allowDuplicate = false;
  43816. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
  43817. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  43818. VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ = {}) VULKAN_HPP_NOEXCEPT
  43819. : modes( modes_ )
  43820. {}
  43821. VULKAN_HPP_CONSTEXPR DeviceGroupSwapchainCreateInfoKHR( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  43822. DeviceGroupSwapchainCreateInfoKHR( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  43823. : DeviceGroupSwapchainCreateInfoKHR( *reinterpret_cast<DeviceGroupSwapchainCreateInfoKHR const *>( &rhs ) )
  43824. {}
  43825. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  43826. VULKAN_HPP_CONSTEXPR_14 DeviceGroupSwapchainCreateInfoKHR & operator=( DeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  43827. DeviceGroupSwapchainCreateInfoKHR & operator=( VkDeviceGroupSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  43828. {
  43829. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR const *>( &rhs );
  43830. return *this;
  43831. }
  43832. DeviceGroupSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  43833. {
  43834. pNext = pNext_;
  43835. return *this;
  43836. }
  43837. DeviceGroupSwapchainCreateInfoKHR & setModes( VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes_ ) VULKAN_HPP_NOEXCEPT
  43838. {
  43839. modes = modes_;
  43840. return *this;
  43841. }
  43842. operator VkDeviceGroupSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  43843. {
  43844. return *reinterpret_cast<const VkDeviceGroupSwapchainCreateInfoKHR*>( this );
  43845. }
  43846. operator VkDeviceGroupSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
  43847. {
  43848. return *reinterpret_cast<VkDeviceGroupSwapchainCreateInfoKHR*>( this );
  43849. }
  43850. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  43851. auto operator<=>( DeviceGroupSwapchainCreateInfoKHR const& ) const = default;
  43852. #else
  43853. bool operator==( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  43854. {
  43855. return ( sType == rhs.sType )
  43856. && ( pNext == rhs.pNext )
  43857. && ( modes == rhs.modes );
  43858. }
  43859. bool operator!=( DeviceGroupSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  43860. {
  43861. return !operator==( rhs );
  43862. }
  43863. #endif
  43864. public:
  43865. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceGroupSwapchainCreateInfoKHR;
  43866. const void* pNext = {};
  43867. VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes = {};
  43868. };
  43869. static_assert( sizeof( DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
  43870. static_assert( std::is_standard_layout<DeviceGroupSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  43871. template <>
  43872. struct CppType<StructureType, StructureType::eDeviceGroupSwapchainCreateInfoKHR>
  43873. {
  43874. using Type = DeviceGroupSwapchainCreateInfoKHR;
  43875. };
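// Usage sketch (editorial comment, not part of the generated header): requesting the
// device-group present modes a swapchain may use. "swapchainCreateInfo" is an assumed
// vk::SwapchainCreateInfoKHR filled in elsewhere.
//
//   vk::DeviceGroupSwapchainCreateInfoKHR deviceGroupSwapchainInfo( vk::DeviceGroupPresentModeFlagBitsKHR::eLocal );
//   swapchainCreateInfo.setPNext( &deviceGroupSwapchainInfo );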
  43876. struct DeviceMemoryOverallocationCreateInfoAMD
  43877. {
  43878. static const bool allowDuplicate = false;
  43879. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
  43880. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  43881. VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD(VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault) VULKAN_HPP_NOEXCEPT
  43882. : overallocationBehavior( overallocationBehavior_ )
  43883. {}
  43884. VULKAN_HPP_CONSTEXPR DeviceMemoryOverallocationCreateInfoAMD( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  43885. DeviceMemoryOverallocationCreateInfoAMD( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
  43886. : DeviceMemoryOverallocationCreateInfoAMD( *reinterpret_cast<DeviceMemoryOverallocationCreateInfoAMD const *>( &rhs ) )
  43887. {}
  43888. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  43889. VULKAN_HPP_CONSTEXPR_14 DeviceMemoryOverallocationCreateInfoAMD & operator=( DeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  43890. DeviceMemoryOverallocationCreateInfoAMD & operator=( VkDeviceMemoryOverallocationCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
  43891. {
  43892. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD const *>( &rhs );
  43893. return *this;
  43894. }
  43895. DeviceMemoryOverallocationCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  43896. {
  43897. pNext = pNext_;
  43898. return *this;
  43899. }
  43900. DeviceMemoryOverallocationCreateInfoAMD & setOverallocationBehavior( VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior_ ) VULKAN_HPP_NOEXCEPT
  43901. {
  43902. overallocationBehavior = overallocationBehavior_;
  43903. return *this;
  43904. }
  43905. operator VkDeviceMemoryOverallocationCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT
  43906. {
  43907. return *reinterpret_cast<const VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
  43908. }
  43909. operator VkDeviceMemoryOverallocationCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
  43910. {
  43911. return *reinterpret_cast<VkDeviceMemoryOverallocationCreateInfoAMD*>( this );
  43912. }
  43913. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  43914. auto operator<=>( DeviceMemoryOverallocationCreateInfoAMD const& ) const = default;
  43915. #else
  43916. bool operator==( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
  43917. {
  43918. return ( sType == rhs.sType )
  43919. && ( pNext == rhs.pNext )
  43920. && ( overallocationBehavior == rhs.overallocationBehavior );
  43921. }
  43922. bool operator!=( DeviceMemoryOverallocationCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
  43923. {
  43924. return !operator==( rhs );
  43925. }
  43926. #endif
  43927. public:
  43928. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryOverallocationCreateInfoAMD;
  43929. const void* pNext = {};
  43930. VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD overallocationBehavior = VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD::eDefault;
  43931. };
  43932. static_assert( sizeof( DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ), "struct and wrapper have different size!" );
  43933. static_assert( std::is_standard_layout<DeviceMemoryOverallocationCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
  43934. template <>
  43935. struct CppType<StructureType, StructureType::eDeviceMemoryOverallocationCreateInfoAMD>
  43936. {
  43937. using Type = DeviceMemoryOverallocationCreateInfoAMD;
  43938. };
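// Usage sketch (editorial comment, not part of the generated header): opting out of
// AMD driver memory overallocation at device creation. "deviceCreateInfo" is an assumption.
//
//   vk::DeviceMemoryOverallocationCreateInfoAMD overallocationInfo( vk::MemoryOverallocationBehaviorAMD::eDisallowed );
//   deviceCreateInfo.setPNext( &overallocationInfo );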
  43939. struct DeviceMemoryReportCallbackDataEXT
  43940. {
  43941. static const bool allowDuplicate = false;
  43942. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceMemoryReportCallbackDataEXT;
  43943. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  43944. VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT(VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type_ = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate, uint64_t memoryObjectId_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize size_ = {}, VULKAN_HPP_NAMESPACE::ObjectType objectType_ = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown, uint64_t objectHandle_ = {}, uint32_t heapIndex_ = {}) VULKAN_HPP_NOEXCEPT
  43945. : flags( flags_ ), type( type_ ), memoryObjectId( memoryObjectId_ ), size( size_ ), objectType( objectType_ ), objectHandle( objectHandle_ ), heapIndex( heapIndex_ )
  43946. {}
  43947. VULKAN_HPP_CONSTEXPR DeviceMemoryReportCallbackDataEXT( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  43948. DeviceMemoryReportCallbackDataEXT( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  43949. : DeviceMemoryReportCallbackDataEXT( *reinterpret_cast<DeviceMemoryReportCallbackDataEXT const *>( &rhs ) )
  43950. {}
  43951. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  43952. VULKAN_HPP_CONSTEXPR_14 DeviceMemoryReportCallbackDataEXT & operator=( DeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  43953. DeviceMemoryReportCallbackDataEXT & operator=( VkDeviceMemoryReportCallbackDataEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  43954. {
  43955. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT const *>( &rhs );
  43956. return *this;
  43957. }
  43958. operator VkDeviceMemoryReportCallbackDataEXT const&() const VULKAN_HPP_NOEXCEPT
  43959. {
  43960. return *reinterpret_cast<const VkDeviceMemoryReportCallbackDataEXT*>( this );
  43961. }
  43962. operator VkDeviceMemoryReportCallbackDataEXT &() VULKAN_HPP_NOEXCEPT
  43963. {
  43964. return *reinterpret_cast<VkDeviceMemoryReportCallbackDataEXT*>( this );
  43965. }
  43966. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  43967. auto operator<=>( DeviceMemoryReportCallbackDataEXT const& ) const = default;
  43968. #else
  43969. bool operator==( DeviceMemoryReportCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  43970. {
  43971. return ( sType == rhs.sType )
  43972. && ( pNext == rhs.pNext )
  43973. && ( flags == rhs.flags )
  43974. && ( type == rhs.type )
  43975. && ( memoryObjectId == rhs.memoryObjectId )
  43976. && ( size == rhs.size )
  43977. && ( objectType == rhs.objectType )
  43978. && ( objectHandle == rhs.objectHandle )
  43979. && ( heapIndex == rhs.heapIndex );
  43980. }
  43981. bool operator!=( DeviceMemoryReportCallbackDataEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  43982. {
  43983. return !operator==( rhs );
  43984. }
  43985. #endif
  43986. public:
  43987. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceMemoryReportCallbackDataEXT;
  43988. const void* pNext = {};
  43989. VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT flags = {};
  43990. VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT type = VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT::eAllocate;
  43991. uint64_t memoryObjectId = {};
  43992. VULKAN_HPP_NAMESPACE::DeviceSize size = {};
  43993. VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eUnknown;
  43994. uint64_t objectHandle = {};
  43995. uint32_t heapIndex = {};
  43996. };
  43997. static_assert( sizeof( DeviceMemoryReportCallbackDataEXT ) == sizeof( VkDeviceMemoryReportCallbackDataEXT ), "struct and wrapper have different size!" );
  43998. static_assert( std::is_standard_layout<DeviceMemoryReportCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
  43999. template <>
  44000. struct CppType<StructureType, StructureType::eDeviceMemoryReportCallbackDataEXT>
  44001. {
  44002. using Type = DeviceMemoryReportCallbackDataEXT;
  44003. };
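// Usage sketch (editorial comment, not part of the generated header): this structure
// is read-only from the application's point of view; it is delivered to the callback
// registered through VK_EXT_device_memory_report. The callback shape below follows the
// C callback typedef and is an assumption for the example.
//
//   VKAPI_ATTR void VKAPI_CALL memoryReportCallback( const VkDeviceMemoryReportCallbackDataEXT * pCallbackData, void * /*pUserData*/ )
//   {
//     auto const & data = *reinterpret_cast<const vk::DeviceMemoryReportCallbackDataEXT *>( pCallbackData );
//     if ( data.type == vk::DeviceMemoryReportEventTypeEXT::eAllocate )
//     {
//       /* track data.memoryObjectId, data.size, data.heapIndex ... */
//     }
//   }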
  44004. struct DevicePrivateDataCreateInfoEXT
  44005. {
  44006. static const bool allowDuplicate = true;
  44007. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDevicePrivateDataCreateInfoEXT;
  44008. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44009. VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfoEXT(uint32_t privateDataSlotRequestCount_ = {}) VULKAN_HPP_NOEXCEPT
  44010. : privateDataSlotRequestCount( privateDataSlotRequestCount_ )
  44011. {}
  44012. VULKAN_HPP_CONSTEXPR DevicePrivateDataCreateInfoEXT( DevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44013. DevicePrivateDataCreateInfoEXT( VkDevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  44014. : DevicePrivateDataCreateInfoEXT( *reinterpret_cast<DevicePrivateDataCreateInfoEXT const *>( &rhs ) )
  44015. {}
  44016. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44017. VULKAN_HPP_CONSTEXPR_14 DevicePrivateDataCreateInfoEXT & operator=( DevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44018. DevicePrivateDataCreateInfoEXT & operator=( VkDevicePrivateDataCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  44019. {
  44020. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfoEXT const *>( &rhs );
  44021. return *this;
  44022. }
  44023. DevicePrivateDataCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  44024. {
  44025. pNext = pNext_;
  44026. return *this;
  44027. }
  44028. DevicePrivateDataCreateInfoEXT & setPrivateDataSlotRequestCount( uint32_t privateDataSlotRequestCount_ ) VULKAN_HPP_NOEXCEPT
  44029. {
  44030. privateDataSlotRequestCount = privateDataSlotRequestCount_;
  44031. return *this;
  44032. }
  44033. operator VkDevicePrivateDataCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  44034. {
  44035. return *reinterpret_cast<const VkDevicePrivateDataCreateInfoEXT*>( this );
  44036. }
  44037. operator VkDevicePrivateDataCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  44038. {
  44039. return *reinterpret_cast<VkDevicePrivateDataCreateInfoEXT*>( this );
  44040. }
  44041. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  44042. auto operator<=>( DevicePrivateDataCreateInfoEXT const& ) const = default;
  44043. #else
  44044. bool operator==( DevicePrivateDataCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  44045. {
  44046. return ( sType == rhs.sType )
  44047. && ( pNext == rhs.pNext )
  44048. && ( privateDataSlotRequestCount == rhs.privateDataSlotRequestCount );
  44049. }
  44050. bool operator!=( DevicePrivateDataCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  44051. {
  44052. return !operator==( rhs );
  44053. }
  44054. #endif
  44055. public:
  44056. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDevicePrivateDataCreateInfoEXT;
  44057. const void* pNext = {};
  44058. uint32_t privateDataSlotRequestCount = {};
  44059. };
  44060. static_assert( sizeof( DevicePrivateDataCreateInfoEXT ) == sizeof( VkDevicePrivateDataCreateInfoEXT ), "struct and wrapper have different size!" );
  44061. static_assert( std::is_standard_layout<DevicePrivateDataCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  44062. template <>
  44063. struct CppType<StructureType, StructureType::eDevicePrivateDataCreateInfoEXT>
  44064. {
  44065. using Type = DevicePrivateDataCreateInfoEXT;
  44066. };
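// Usage sketch (editorial comment, not part of the generated header): reserving
// private data slots at device creation; note allowDuplicate is true, so several of
// these may appear in the same pNext chain. "deviceCreateInfo" is an assumption.
//
//   vk::DevicePrivateDataCreateInfoEXT privateDataInfo( 1 /* privateDataSlotRequestCount */ );
//   deviceCreateInfo.setPNext( &privateDataInfo );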
  44067. struct DeviceQueueGlobalPriorityCreateInfoEXT
  44068. {
  44069. static const bool allowDuplicate = false;
  44070. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT;
  44071. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44072. VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT(VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow) VULKAN_HPP_NOEXCEPT
  44073. : globalPriority( globalPriority_ )
  44074. {}
  44075. VULKAN_HPP_CONSTEXPR DeviceQueueGlobalPriorityCreateInfoEXT( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44076. DeviceQueueGlobalPriorityCreateInfoEXT( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  44077. : DeviceQueueGlobalPriorityCreateInfoEXT( *reinterpret_cast<DeviceQueueGlobalPriorityCreateInfoEXT const *>( &rhs ) )
  44078. {}
  44079. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44080. VULKAN_HPP_CONSTEXPR_14 DeviceQueueGlobalPriorityCreateInfoEXT & operator=( DeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44081. DeviceQueueGlobalPriorityCreateInfoEXT & operator=( VkDeviceQueueGlobalPriorityCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  44082. {
  44083. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT const *>( &rhs );
  44084. return *this;
  44085. }
  44086. DeviceQueueGlobalPriorityCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  44087. {
  44088. pNext = pNext_;
  44089. return *this;
  44090. }
  44091. DeviceQueueGlobalPriorityCreateInfoEXT & setGlobalPriority( VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority_ ) VULKAN_HPP_NOEXCEPT
  44092. {
  44093. globalPriority = globalPriority_;
  44094. return *this;
  44095. }
  44096. operator VkDeviceQueueGlobalPriorityCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  44097. {
  44098. return *reinterpret_cast<const VkDeviceQueueGlobalPriorityCreateInfoEXT*>( this );
  44099. }
  44100. operator VkDeviceQueueGlobalPriorityCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  44101. {
  44102. return *reinterpret_cast<VkDeviceQueueGlobalPriorityCreateInfoEXT*>( this );
  44103. }
  44104. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  44105. auto operator<=>( DeviceQueueGlobalPriorityCreateInfoEXT const& ) const = default;
  44106. #else
  44107. bool operator==( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  44108. {
  44109. return ( sType == rhs.sType )
  44110. && ( pNext == rhs.pNext )
  44111. && ( globalPriority == rhs.globalPriority );
  44112. }
  44113. bool operator!=( DeviceQueueGlobalPriorityCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  44114. {
  44115. return !operator==( rhs );
  44116. }
  44117. #endif
  44118. public:
  44119. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT;
  44120. const void* pNext = {};
  44121. VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT globalPriority = VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT::eLow;
  44122. };
  44123. static_assert( sizeof( DeviceQueueGlobalPriorityCreateInfoEXT ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoEXT ), "struct and wrapper have different size!" );
  44124. static_assert( std::is_standard_layout<DeviceQueueGlobalPriorityCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  44125. template <>
  44126. struct CppType<StructureType, StructureType::eDeviceQueueGlobalPriorityCreateInfoEXT>
  44127. {
  44128. using Type = DeviceQueueGlobalPriorityCreateInfoEXT;
  44129. };
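// Usage sketch (editorial comment, not part of the generated header): asking for a
// high global priority on one queue. "queueCreateInfo" is an assumed
// vk::DeviceQueueCreateInfo from the surrounding device-creation code.
//
//   vk::DeviceQueueGlobalPriorityCreateInfoEXT globalPriorityInfo( vk::QueueGlobalPriorityEXT::eHigh );
//   queueCreateInfo.setPNext( &globalPriorityInfo );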
  44130. #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
  44131. struct DirectFBSurfaceCreateInfoEXT
  44132. {
  44133. static const bool allowDuplicate = false;
  44134. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDirectfbSurfaceCreateInfoEXT;
  44135. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44136. VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ = {}, IDirectFB* dfb_ = {}, IDirectFBSurface* surface_ = {}) VULKAN_HPP_NOEXCEPT
  44137. : flags( flags_ ), dfb( dfb_ ), surface( surface_ )
  44138. {}
  44139. VULKAN_HPP_CONSTEXPR DirectFBSurfaceCreateInfoEXT( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44140. DirectFBSurfaceCreateInfoEXT( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  44141. : DirectFBSurfaceCreateInfoEXT( *reinterpret_cast<DirectFBSurfaceCreateInfoEXT const *>( &rhs ) )
  44142. {}
  44143. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44144. VULKAN_HPP_CONSTEXPR_14 DirectFBSurfaceCreateInfoEXT & operator=( DirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44145. DirectFBSurfaceCreateInfoEXT & operator=( VkDirectFBSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  44146. {
  44147. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT const *>( &rhs );
  44148. return *this;
  44149. }
  44150. DirectFBSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  44151. {
  44152. pNext = pNext_;
  44153. return *this;
  44154. }
  44155. DirectFBSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
  44156. {
  44157. flags = flags_;
  44158. return *this;
  44159. }
  44160. DirectFBSurfaceCreateInfoEXT & setDfb( IDirectFB* dfb_ ) VULKAN_HPP_NOEXCEPT
  44161. {
  44162. dfb = dfb_;
  44163. return *this;
  44164. }
  44165. DirectFBSurfaceCreateInfoEXT & setSurface( IDirectFBSurface* surface_ ) VULKAN_HPP_NOEXCEPT
  44166. {
  44167. surface = surface_;
  44168. return *this;
  44169. }
  44170. operator VkDirectFBSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  44171. {
  44172. return *reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT*>( this );
  44173. }
  44174. operator VkDirectFBSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  44175. {
  44176. return *reinterpret_cast<VkDirectFBSurfaceCreateInfoEXT*>( this );
  44177. }
  44178. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  44179. auto operator<=>( DirectFBSurfaceCreateInfoEXT const& ) const = default;
  44180. #else
  44181. bool operator==( DirectFBSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  44182. {
  44183. return ( sType == rhs.sType )
  44184. && ( pNext == rhs.pNext )
  44185. && ( flags == rhs.flags )
  44186. && ( dfb == rhs.dfb )
  44187. && ( surface == rhs.surface );
  44188. }
  44189. bool operator!=( DirectFBSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  44190. {
  44191. return !operator==( rhs );
  44192. }
  44193. #endif
  44194. public:
  44195. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDirectfbSurfaceCreateInfoEXT;
  44196. const void* pNext = {};
  44197. VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT flags = {};
  44198. IDirectFB* dfb = {};
  44199. IDirectFBSurface* surface = {};
  44200. };
  44201. static_assert( sizeof( DirectFBSurfaceCreateInfoEXT ) == sizeof( VkDirectFBSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
  44202. static_assert( std::is_standard_layout<DirectFBSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  44203. template <>
  44204. struct CppType<StructureType, StructureType::eDirectfbSurfaceCreateInfoEXT>
  44205. {
  44206. using Type = DirectFBSurfaceCreateInfoEXT;
  44207. };
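// Usage sketch (editorial comment, not part of the generated header): wrapping an
// existing IDirectFB / IDirectFBSurface pair in a Vulkan surface. "dfb", "dfbSurface",
// and "instance" are assumed to exist; the create call mirrors vkCreateDirectFBSurfaceEXT.
//
//   vk::DirectFBSurfaceCreateInfoEXT surfaceCreateInfo( {}, dfb, dfbSurface );
//   vk::SurfaceKHR surface = instance.createDirectFBSurfaceEXT( surfaceCreateInfo );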
  44208. #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
  44209. struct DispatchIndirectCommand
  44210. {
  44211. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44212. VULKAN_HPP_CONSTEXPR DispatchIndirectCommand(uint32_t x_ = {}, uint32_t y_ = {}, uint32_t z_ = {}) VULKAN_HPP_NOEXCEPT
  44213. : x( x_ ), y( y_ ), z( z_ )
  44214. {}
  44215. VULKAN_HPP_CONSTEXPR DispatchIndirectCommand( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44216. DispatchIndirectCommand( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
  44217. : DispatchIndirectCommand( *reinterpret_cast<DispatchIndirectCommand const *>( &rhs ) )
  44218. {}
  44219. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44220. VULKAN_HPP_CONSTEXPR_14 DispatchIndirectCommand & operator=( DispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44221. DispatchIndirectCommand & operator=( VkDispatchIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
  44222. {
  44223. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand const *>( &rhs );
  44224. return *this;
  44225. }
  44226. DispatchIndirectCommand & setX( uint32_t x_ ) VULKAN_HPP_NOEXCEPT
  44227. {
  44228. x = x_;
  44229. return *this;
  44230. }
  44231. DispatchIndirectCommand & setY( uint32_t y_ ) VULKAN_HPP_NOEXCEPT
  44232. {
  44233. y = y_;
  44234. return *this;
  44235. }
  44236. DispatchIndirectCommand & setZ( uint32_t z_ ) VULKAN_HPP_NOEXCEPT
  44237. {
  44238. z = z_;
  44239. return *this;
  44240. }
  44241. operator VkDispatchIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
  44242. {
  44243. return *reinterpret_cast<const VkDispatchIndirectCommand*>( this );
  44244. }
  44245. operator VkDispatchIndirectCommand &() VULKAN_HPP_NOEXCEPT
  44246. {
  44247. return *reinterpret_cast<VkDispatchIndirectCommand*>( this );
  44248. }
  44249. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  44250. auto operator<=>( DispatchIndirectCommand const& ) const = default;
  44251. #else
  44252. bool operator==( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
  44253. {
  44254. return ( x == rhs.x )
  44255. && ( y == rhs.y )
  44256. && ( z == rhs.z );
  44257. }
  44258. bool operator!=( DispatchIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
  44259. {
  44260. return !operator==( rhs );
  44261. }
  44262. #endif
  44263. public:
  44264. uint32_t x = {};
  44265. uint32_t y = {};
  44266. uint32_t z = {};
  44267. };
  44268. static_assert( sizeof( DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ), "struct and wrapper have different size!" );
  44269. static_assert( std::is_standard_layout<DispatchIndirectCommand>::value, "struct wrapper is not a standard layout!" );
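// Usage sketch (editorial comment, not part of the generated header): the struct is a
// plain triple of workgroup counts, laid out exactly as vkCmdDispatchIndirect expects
// at the given buffer offset. "data" is an assumed mapped pointer into that buffer.
//
//   vk::DispatchIndirectCommand dispatch( 64, 1, 1 );   // x, y, z workgroup counts
//   std::memcpy( data, &dispatch, sizeof( dispatch ) );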
  44270. struct DisplayNativeHdrSurfaceCapabilitiesAMD
  44271. {
  44272. static const bool allowDuplicate = false;
  44273. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
  44274. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44275. VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD(VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport_ = {}) VULKAN_HPP_NOEXCEPT
  44276. : localDimmingSupport( localDimmingSupport_ )
  44277. {}
  44278. VULKAN_HPP_CONSTEXPR DisplayNativeHdrSurfaceCapabilitiesAMD( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44279. DisplayNativeHdrSurfaceCapabilitiesAMD( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
  44280. : DisplayNativeHdrSurfaceCapabilitiesAMD( *reinterpret_cast<DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs ) )
  44281. {}
  44282. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44283. VULKAN_HPP_CONSTEXPR_14 DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( DisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44284. DisplayNativeHdrSurfaceCapabilitiesAMD & operator=( VkDisplayNativeHdrSurfaceCapabilitiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
  44285. {
  44286. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD const *>( &rhs );
  44287. return *this;
  44288. }
  44289. operator VkDisplayNativeHdrSurfaceCapabilitiesAMD const&() const VULKAN_HPP_NOEXCEPT
  44290. {
  44291. return *reinterpret_cast<const VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
  44292. }
  44293. operator VkDisplayNativeHdrSurfaceCapabilitiesAMD &() VULKAN_HPP_NOEXCEPT
  44294. {
  44295. return *reinterpret_cast<VkDisplayNativeHdrSurfaceCapabilitiesAMD*>( this );
  44296. }
  44297. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  44298. auto operator<=>( DisplayNativeHdrSurfaceCapabilitiesAMD const& ) const = default;
  44299. #else
  44300. bool operator==( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
  44301. {
  44302. return ( sType == rhs.sType )
  44303. && ( pNext == rhs.pNext )
  44304. && ( localDimmingSupport == rhs.localDimmingSupport );
  44305. }
  44306. bool operator!=( DisplayNativeHdrSurfaceCapabilitiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
  44307. {
  44308. return !operator==( rhs );
  44309. }
  44310. #endif
  44311. public:
  44312. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD;
  44313. void* pNext = {};
  44314. VULKAN_HPP_NAMESPACE::Bool32 localDimmingSupport = {};
  44315. };
  44316. static_assert( sizeof( DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ), "struct and wrapper have different size!" );
  44317. static_assert( std::is_standard_layout<DisplayNativeHdrSurfaceCapabilitiesAMD>::value, "struct wrapper is not a standard layout!" );
  44318. template <>
  44319. struct CppType<StructureType, StructureType::eDisplayNativeHdrSurfaceCapabilitiesAMD>
  44320. {
  44321. using Type = DisplayNativeHdrSurfaceCapabilitiesAMD;
  44322. };
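// Usage sketch (editorial comment, not part of the generated header): this is an
// output structure; a common pattern is to query it through the pNext chain of
// vk::SurfaceCapabilities2KHR. "physicalDevice" and "surfaceInfo" are assumptions.
//
//   auto chain = physicalDevice.getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR, vk::DisplayNativeHdrSurfaceCapabilitiesAMD>( surfaceInfo );
//   bool localDimming = chain.get<vk::DisplayNativeHdrSurfaceCapabilitiesAMD>().localDimmingSupport;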
  44323. struct DisplayPresentInfoKHR
  44324. {
  44325. static const bool allowDuplicate = false;
  44326. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplayPresentInfoKHR;
  44327. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44328. VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR(VULKAN_HPP_NAMESPACE::Rect2D srcRect_ = {}, VULKAN_HPP_NAMESPACE::Rect2D dstRect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 persistent_ = {}) VULKAN_HPP_NOEXCEPT
  44329. : srcRect( srcRect_ ), dstRect( dstRect_ ), persistent( persistent_ )
  44330. {}
  44331. VULKAN_HPP_CONSTEXPR DisplayPresentInfoKHR( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44332. DisplayPresentInfoKHR( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  44333. : DisplayPresentInfoKHR( *reinterpret_cast<DisplayPresentInfoKHR const *>( &rhs ) )
  44334. {}
  44335. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44336. VULKAN_HPP_CONSTEXPR_14 DisplayPresentInfoKHR & operator=( DisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44337. DisplayPresentInfoKHR & operator=( VkDisplayPresentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  44338. {
  44339. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR const *>( &rhs );
  44340. return *this;
  44341. }
  44342. DisplayPresentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  44343. {
  44344. pNext = pNext_;
  44345. return *this;
  44346. }
  44347. DisplayPresentInfoKHR & setSrcRect( VULKAN_HPP_NAMESPACE::Rect2D const & srcRect_ ) VULKAN_HPP_NOEXCEPT
  44348. {
  44349. srcRect = srcRect_;
  44350. return *this;
  44351. }
  44352. DisplayPresentInfoKHR & setDstRect( VULKAN_HPP_NAMESPACE::Rect2D const & dstRect_ ) VULKAN_HPP_NOEXCEPT
  44353. {
  44354. dstRect = dstRect_;
  44355. return *this;
  44356. }
  44357. DisplayPresentInfoKHR & setPersistent( VULKAN_HPP_NAMESPACE::Bool32 persistent_ ) VULKAN_HPP_NOEXCEPT
  44358. {
  44359. persistent = persistent_;
  44360. return *this;
  44361. }
  44362. operator VkDisplayPresentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  44363. {
  44364. return *reinterpret_cast<const VkDisplayPresentInfoKHR*>( this );
  44365. }
  44366. operator VkDisplayPresentInfoKHR &() VULKAN_HPP_NOEXCEPT
  44367. {
  44368. return *reinterpret_cast<VkDisplayPresentInfoKHR*>( this );
  44369. }
  44370. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  44371. auto operator<=>( DisplayPresentInfoKHR const& ) const = default;
  44372. #else
  44373. bool operator==( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  44374. {
  44375. return ( sType == rhs.sType )
  44376. && ( pNext == rhs.pNext )
  44377. && ( srcRect == rhs.srcRect )
  44378. && ( dstRect == rhs.dstRect )
  44379. && ( persistent == rhs.persistent );
  44380. }
  44381. bool operator!=( DisplayPresentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  44382. {
  44383. return !operator==( rhs );
  44384. }
  44385. #endif
  44386. public:
  44387. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplayPresentInfoKHR;
  44388. const void* pNext = {};
  44389. VULKAN_HPP_NAMESPACE::Rect2D srcRect = {};
  44390. VULKAN_HPP_NAMESPACE::Rect2D dstRect = {};
  44391. VULKAN_HPP_NAMESPACE::Bool32 persistent = {};
  44392. };
  44393. static_assert( sizeof( DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ), "struct and wrapper have different size!" );
  44394. static_assert( std::is_standard_layout<DisplayPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
  44395. template <>
  44396. struct CppType<StructureType, StructureType::eDisplayPresentInfoKHR>
  44397. {
  44398. using Type = DisplayPresentInfoKHR;
  44399. };
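// Usage sketch (editorial comment, not part of the generated header): scaled,
// persistent presentation on a display surface. "srcRect", "dstRect", and
// "presentInfo" are assumed caller-side variables.
//
//   vk::DisplayPresentInfoKHR displayPresentInfo( srcRect, dstRect, VK_TRUE /* persistent */ );
//   presentInfo.setPNext( &displayPresentInfo );   // chain into VkPresentInfoKHR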
  44400. struct DisplaySurfaceCreateInfoKHR
  44401. {
  44402. static const bool allowDuplicate = false;
  44403. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDisplaySurfaceCreateInfoKHR;
  44404. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44405. VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ = {}, VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ = {}, uint32_t planeIndex_ = {}, uint32_t planeStackIndex_ = {}, VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity, float globalAlpha_ = {}, VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque, VULKAN_HPP_NAMESPACE::Extent2D imageExtent_ = {}) VULKAN_HPP_NOEXCEPT
  44406. : flags( flags_ ), displayMode( displayMode_ ), planeIndex( planeIndex_ ), planeStackIndex( planeStackIndex_ ), transform( transform_ ), globalAlpha( globalAlpha_ ), alphaMode( alphaMode_ ), imageExtent( imageExtent_ )
  44407. {}
  44408. VULKAN_HPP_CONSTEXPR DisplaySurfaceCreateInfoKHR( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44409. DisplaySurfaceCreateInfoKHR( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  44410. : DisplaySurfaceCreateInfoKHR( *reinterpret_cast<DisplaySurfaceCreateInfoKHR const *>( &rhs ) )
  44411. {}
  44412. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44413. VULKAN_HPP_CONSTEXPR_14 DisplaySurfaceCreateInfoKHR & operator=( DisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44414. DisplaySurfaceCreateInfoKHR & operator=( VkDisplaySurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  44415. {
  44416. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR const *>( &rhs );
  44417. return *this;
  44418. }
  44419. DisplaySurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  44420. {
  44421. pNext = pNext_;
  44422. return *this;
  44423. }
  44424. DisplaySurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
  44425. {
  44426. flags = flags_;
  44427. return *this;
  44428. }
  44429. DisplaySurfaceCreateInfoKHR & setDisplayMode( VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode_ ) VULKAN_HPP_NOEXCEPT
  44430. {
  44431. displayMode = displayMode_;
  44432. return *this;
  44433. }
  44434. DisplaySurfaceCreateInfoKHR & setPlaneIndex( uint32_t planeIndex_ ) VULKAN_HPP_NOEXCEPT
  44435. {
  44436. planeIndex = planeIndex_;
  44437. return *this;
  44438. }
  44439. DisplaySurfaceCreateInfoKHR & setPlaneStackIndex( uint32_t planeStackIndex_ ) VULKAN_HPP_NOEXCEPT
  44440. {
  44441. planeStackIndex = planeStackIndex_;
  44442. return *this;
  44443. }
  44444. DisplaySurfaceCreateInfoKHR & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
  44445. {
  44446. transform = transform_;
  44447. return *this;
  44448. }
  44449. DisplaySurfaceCreateInfoKHR & setGlobalAlpha( float globalAlpha_ ) VULKAN_HPP_NOEXCEPT
  44450. {
  44451. globalAlpha = globalAlpha_;
  44452. return *this;
  44453. }
  44454. DisplaySurfaceCreateInfoKHR & setAlphaMode( VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode_ ) VULKAN_HPP_NOEXCEPT
  44455. {
  44456. alphaMode = alphaMode_;
  44457. return *this;
  44458. }
  44459. DisplaySurfaceCreateInfoKHR & setImageExtent( VULKAN_HPP_NAMESPACE::Extent2D const & imageExtent_ ) VULKAN_HPP_NOEXCEPT
  44460. {
  44461. imageExtent = imageExtent_;
  44462. return *this;
  44463. }
  44464. operator VkDisplaySurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  44465. {
  44466. return *reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR*>( this );
  44467. }
  44468. operator VkDisplaySurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
  44469. {
  44470. return *reinterpret_cast<VkDisplaySurfaceCreateInfoKHR*>( this );
  44471. }
  44472. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  44473. auto operator<=>( DisplaySurfaceCreateInfoKHR const& ) const = default;
  44474. #else
  44475. bool operator==( DisplaySurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  44476. {
  44477. return ( sType == rhs.sType )
  44478. && ( pNext == rhs.pNext )
  44479. && ( flags == rhs.flags )
  44480. && ( displayMode == rhs.displayMode )
  44481. && ( planeIndex == rhs.planeIndex )
  44482. && ( planeStackIndex == rhs.planeStackIndex )
  44483. && ( transform == rhs.transform )
  44484. && ( globalAlpha == rhs.globalAlpha )
  44485. && ( alphaMode == rhs.alphaMode )
  44486. && ( imageExtent == rhs.imageExtent );
  44487. }
  44488. bool operator!=( DisplaySurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  44489. {
  44490. return !operator==( rhs );
  44491. }
  44492. #endif
  44493. public:
  44494. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDisplaySurfaceCreateInfoKHR;
  44495. const void* pNext = {};
  44496. VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR flags = {};
  44497. VULKAN_HPP_NAMESPACE::DisplayModeKHR displayMode = {};
  44498. uint32_t planeIndex = {};
  44499. uint32_t planeStackIndex = {};
  44500. VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
  44501. float globalAlpha = {};
  44502. VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR alphaMode = VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR::eOpaque;
  44503. VULKAN_HPP_NAMESPACE::Extent2D imageExtent = {};
  44504. };
  44505. static_assert( sizeof( DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
  44506. static_assert( std::is_standard_layout<DisplaySurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  44507. template <>
  44508. struct CppType<StructureType, StructureType::eDisplaySurfaceCreateInfoKHR>
  44509. {
  44510. using Type = DisplaySurfaceCreateInfoKHR;
  44511. };
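// Illustrative usage sketch (not part of the generated header): the setters above return *this,
// so a DisplaySurfaceCreateInfoKHR can be filled fluently and passed to
// Instance::createDisplayPlaneSurfaceKHR (VK_KHR_display).  `displayMode`, `planeIndex`,
// `imageExtent` and `instance` are placeholders.
//
//   vk::DisplaySurfaceCreateInfoKHR createInfo;
//   createInfo.setDisplayMode( displayMode )
//             .setPlaneIndex( planeIndex )
//             .setPlaneStackIndex( 0 )
//             .setTransform( vk::SurfaceTransformFlagBitsKHR::eIdentity )
//             .setGlobalAlpha( 1.0f )
//             .setAlphaMode( vk::DisplayPlaneAlphaFlagBitsKHR::eOpaque )
//             .setImageExtent( imageExtent );
//   vk::SurfaceKHR surface = instance.createDisplayPlaneSurfaceKHR( createInfo );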
  44512. struct DrawIndexedIndirectCommand
  44513. {
  44514. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44515. VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand(uint32_t indexCount_ = {}, uint32_t instanceCount_ = {}, uint32_t firstIndex_ = {}, int32_t vertexOffset_ = {}, uint32_t firstInstance_ = {}) VULKAN_HPP_NOEXCEPT
  44516. : indexCount( indexCount_ ), instanceCount( instanceCount_ ), firstIndex( firstIndex_ ), vertexOffset( vertexOffset_ ), firstInstance( firstInstance_ )
  44517. {}
  44518. VULKAN_HPP_CONSTEXPR DrawIndexedIndirectCommand( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44519. DrawIndexedIndirectCommand( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
  44520. : DrawIndexedIndirectCommand( *reinterpret_cast<DrawIndexedIndirectCommand const *>( &rhs ) )
  44521. {}
  44522. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44523. VULKAN_HPP_CONSTEXPR_14 DrawIndexedIndirectCommand & operator=( DrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44524. DrawIndexedIndirectCommand & operator=( VkDrawIndexedIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
  44525. {
  44526. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand const *>( &rhs );
  44527. return *this;
  44528. }
  44529. DrawIndexedIndirectCommand & setIndexCount( uint32_t indexCount_ ) VULKAN_HPP_NOEXCEPT
  44530. {
  44531. indexCount = indexCount_;
  44532. return *this;
  44533. }
  44534. DrawIndexedIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
  44535. {
  44536. instanceCount = instanceCount_;
  44537. return *this;
  44538. }
  44539. DrawIndexedIndirectCommand & setFirstIndex( uint32_t firstIndex_ ) VULKAN_HPP_NOEXCEPT
  44540. {
  44541. firstIndex = firstIndex_;
  44542. return *this;
  44543. }
  44544. DrawIndexedIndirectCommand & setVertexOffset( int32_t vertexOffset_ ) VULKAN_HPP_NOEXCEPT
  44545. {
  44546. vertexOffset = vertexOffset_;
  44547. return *this;
  44548. }
  44549. DrawIndexedIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
  44550. {
  44551. firstInstance = firstInstance_;
  44552. return *this;
  44553. }
  44554. operator VkDrawIndexedIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
  44555. {
  44556. return *reinterpret_cast<const VkDrawIndexedIndirectCommand*>( this );
  44557. }
  44558. operator VkDrawIndexedIndirectCommand &() VULKAN_HPP_NOEXCEPT
  44559. {
  44560. return *reinterpret_cast<VkDrawIndexedIndirectCommand*>( this );
  44561. }
  44562. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  44563. auto operator<=>( DrawIndexedIndirectCommand const& ) const = default;
  44564. #else
  44565. bool operator==( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
  44566. {
  44567. return ( indexCount == rhs.indexCount )
  44568. && ( instanceCount == rhs.instanceCount )
  44569. && ( firstIndex == rhs.firstIndex )
  44570. && ( vertexOffset == rhs.vertexOffset )
  44571. && ( firstInstance == rhs.firstInstance );
  44572. }
  44573. bool operator!=( DrawIndexedIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
  44574. {
  44575. return !operator==( rhs );
  44576. }
  44577. #endif
  44578. public:
  44579. uint32_t indexCount = {};
  44580. uint32_t instanceCount = {};
  44581. uint32_t firstIndex = {};
  44582. int32_t vertexOffset = {};
  44583. uint32_t firstInstance = {};
  44584. };
  44585. static_assert( sizeof( DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ), "struct and wrapper have different size!" );
  44586. static_assert( std::is_standard_layout<DrawIndexedIndirectCommand>::value, "struct wrapper is not a standard layout!" );
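// Illustrative usage sketch (not part of the generated header): this struct has no sType/pNext;
// it is written into a buffer that the device later reads via vkCmdDrawIndexedIndirect.
// `mappedIndirectMemory`, `indirectBuffer` and `commandBuffer` are placeholders; one command
// drawing `indexCount` indices for a single instance:
//
//   vk::DrawIndexedIndirectCommand cmd( indexCount, 1, 0, 0, 0 );
//   std::memcpy( mappedIndirectMemory, &cmd, sizeof( cmd ) );
//   ...
//   commandBuffer.drawIndexedIndirect( indirectBuffer, 0, 1, sizeof( vk::DrawIndexedIndirectCommand ) );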
  44587. struct DrawIndirectCommand
  44588. {
  44589. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44590. VULKAN_HPP_CONSTEXPR DrawIndirectCommand(uint32_t vertexCount_ = {}, uint32_t instanceCount_ = {}, uint32_t firstVertex_ = {}, uint32_t firstInstance_ = {}) VULKAN_HPP_NOEXCEPT
  44591. : vertexCount( vertexCount_ ), instanceCount( instanceCount_ ), firstVertex( firstVertex_ ), firstInstance( firstInstance_ )
  44592. {}
  44593. VULKAN_HPP_CONSTEXPR DrawIndirectCommand( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44594. DrawIndirectCommand( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
  44595. : DrawIndirectCommand( *reinterpret_cast<DrawIndirectCommand const *>( &rhs ) )
  44596. {}
  44597. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44598. VULKAN_HPP_CONSTEXPR_14 DrawIndirectCommand & operator=( DrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44599. DrawIndirectCommand & operator=( VkDrawIndirectCommand const & rhs ) VULKAN_HPP_NOEXCEPT
  44600. {
  44601. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawIndirectCommand const *>( &rhs );
  44602. return *this;
  44603. }
  44604. DrawIndirectCommand & setVertexCount( uint32_t vertexCount_ ) VULKAN_HPP_NOEXCEPT
  44605. {
  44606. vertexCount = vertexCount_;
  44607. return *this;
  44608. }
  44609. DrawIndirectCommand & setInstanceCount( uint32_t instanceCount_ ) VULKAN_HPP_NOEXCEPT
  44610. {
  44611. instanceCount = instanceCount_;
  44612. return *this;
  44613. }
  44614. DrawIndirectCommand & setFirstVertex( uint32_t firstVertex_ ) VULKAN_HPP_NOEXCEPT
  44615. {
  44616. firstVertex = firstVertex_;
  44617. return *this;
  44618. }
  44619. DrawIndirectCommand & setFirstInstance( uint32_t firstInstance_ ) VULKAN_HPP_NOEXCEPT
  44620. {
  44621. firstInstance = firstInstance_;
  44622. return *this;
  44623. }
  44624. operator VkDrawIndirectCommand const&() const VULKAN_HPP_NOEXCEPT
  44625. {
  44626. return *reinterpret_cast<const VkDrawIndirectCommand*>( this );
  44627. }
  44628. operator VkDrawIndirectCommand &() VULKAN_HPP_NOEXCEPT
  44629. {
  44630. return *reinterpret_cast<VkDrawIndirectCommand*>( this );
  44631. }
  44632. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  44633. auto operator<=>( DrawIndirectCommand const& ) const = default;
  44634. #else
  44635. bool operator==( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
  44636. {
  44637. return ( vertexCount == rhs.vertexCount )
  44638. && ( instanceCount == rhs.instanceCount )
  44639. && ( firstVertex == rhs.firstVertex )
  44640. && ( firstInstance == rhs.firstInstance );
  44641. }
  44642. bool operator!=( DrawIndirectCommand const& rhs ) const VULKAN_HPP_NOEXCEPT
  44643. {
  44644. return !operator==( rhs );
  44645. }
  44646. #endif
  44647. public:
  44648. uint32_t vertexCount = {};
  44649. uint32_t instanceCount = {};
  44650. uint32_t firstVertex = {};
  44651. uint32_t firstInstance = {};
  44652. };
  44653. static_assert( sizeof( DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" );
  44654. static_assert( std::is_standard_layout<DrawIndirectCommand>::value, "struct wrapper is not a standard layout!" );
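// Illustrative usage sketch (not part of the generated header): several DrawIndirectCommand
// records can be packed consecutively and consumed with one multi-draw call, using
// sizeof( vk::DrawIndirectCommand ) as the stride.  `mappedIndirectMemory`, `indirectBuffer` and
// `commandBuffer` are placeholders.
//
//   vk::DrawIndirectCommand cmds[2] = { vk::DrawIndirectCommand( 3, 1, 0, 0 ),
//                                       vk::DrawIndirectCommand( 6, 1, 3, 0 ) };
//   std::memcpy( mappedIndirectMemory, cmds, sizeof( cmds ) );
//   ...
//   commandBuffer.drawIndirect( indirectBuffer, 0, 2, sizeof( vk::DrawIndirectCommand ) );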
  44655. struct DrawMeshTasksIndirectCommandNV
  44656. {
  44657. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44658. VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV(uint32_t taskCount_ = {}, uint32_t firstTask_ = {}) VULKAN_HPP_NOEXCEPT
  44659. : taskCount( taskCount_ ), firstTask( firstTask_ )
  44660. {}
  44661. VULKAN_HPP_CONSTEXPR DrawMeshTasksIndirectCommandNV( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44662. DrawMeshTasksIndirectCommandNV( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
  44663. : DrawMeshTasksIndirectCommandNV( *reinterpret_cast<DrawMeshTasksIndirectCommandNV const *>( &rhs ) )
  44664. {}
  44665. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44666. VULKAN_HPP_CONSTEXPR_14 DrawMeshTasksIndirectCommandNV & operator=( DrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44667. DrawMeshTasksIndirectCommandNV & operator=( VkDrawMeshTasksIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
  44668. {
  44669. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV const *>( &rhs );
  44670. return *this;
  44671. }
  44672. DrawMeshTasksIndirectCommandNV & setTaskCount( uint32_t taskCount_ ) VULKAN_HPP_NOEXCEPT
  44673. {
  44674. taskCount = taskCount_;
  44675. return *this;
  44676. }
  44677. DrawMeshTasksIndirectCommandNV & setFirstTask( uint32_t firstTask_ ) VULKAN_HPP_NOEXCEPT
  44678. {
  44679. firstTask = firstTask_;
  44680. return *this;
  44681. }
  44682. operator VkDrawMeshTasksIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
  44683. {
  44684. return *reinterpret_cast<const VkDrawMeshTasksIndirectCommandNV*>( this );
  44685. }
  44686. operator VkDrawMeshTasksIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
  44687. {
  44688. return *reinterpret_cast<VkDrawMeshTasksIndirectCommandNV*>( this );
  44689. }
  44690. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  44691. auto operator<=>( DrawMeshTasksIndirectCommandNV const& ) const = default;
  44692. #else
  44693. bool operator==( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  44694. {
  44695. return ( taskCount == rhs.taskCount )
  44696. && ( firstTask == rhs.firstTask );
  44697. }
  44698. bool operator!=( DrawMeshTasksIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  44699. {
  44700. return !operator==( rhs );
  44701. }
  44702. #endif
  44703. public:
  44704. uint32_t taskCount = {};
  44705. uint32_t firstTask = {};
  44706. };
  44707. static_assert( sizeof( DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ), "struct and wrapper have different size!" );
  44708. static_assert( std::is_standard_layout<DrawMeshTasksIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
  44709. struct DrmFormatModifierPropertiesEXT
  44710. {
  44711. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44712. VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures_ = {}) VULKAN_HPP_NOEXCEPT
  44713. : drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ), drmFormatModifierTilingFeatures( drmFormatModifierTilingFeatures_ )
  44714. {}
  44715. VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesEXT( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44716. DrmFormatModifierPropertiesEXT( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  44717. : DrmFormatModifierPropertiesEXT( *reinterpret_cast<DrmFormatModifierPropertiesEXT const *>( &rhs ) )
  44718. {}
  44719. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44720. VULKAN_HPP_CONSTEXPR_14 DrmFormatModifierPropertiesEXT & operator=( DrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44721. DrmFormatModifierPropertiesEXT & operator=( VkDrmFormatModifierPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  44722. {
  44723. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT const *>( &rhs );
  44724. return *this;
  44725. }
  44726. operator VkDrmFormatModifierPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
  44727. {
  44728. return *reinterpret_cast<const VkDrmFormatModifierPropertiesEXT*>( this );
  44729. }
  44730. operator VkDrmFormatModifierPropertiesEXT &() VULKAN_HPP_NOEXCEPT
  44731. {
  44732. return *reinterpret_cast<VkDrmFormatModifierPropertiesEXT*>( this );
  44733. }
  44734. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  44735. auto operator<=>( DrmFormatModifierPropertiesEXT const& ) const = default;
  44736. #else
  44737. bool operator==( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  44738. {
  44739. return ( drmFormatModifier == rhs.drmFormatModifier )
  44740. && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
  44741. && ( drmFormatModifierTilingFeatures == rhs.drmFormatModifierTilingFeatures );
  44742. }
  44743. bool operator!=( DrmFormatModifierPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  44744. {
  44745. return !operator==( rhs );
  44746. }
  44747. #endif
  44748. public:
  44749. uint64_t drmFormatModifier = {};
  44750. uint32_t drmFormatModifierPlaneCount = {};
  44751. VULKAN_HPP_NAMESPACE::FormatFeatureFlags drmFormatModifierTilingFeatures = {};
  44752. };
  44753. static_assert( sizeof( DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ), "struct and wrapper have different size!" );
  44754. static_assert( std::is_standard_layout<DrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  44755. struct DrmFormatModifierPropertiesListEXT
  44756. {
  44757. static const bool allowDuplicate = false;
  44758. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eDrmFormatModifierPropertiesListEXT;
  44759. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44760. VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT(uint32_t drmFormatModifierCount_ = {}, VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties_ = {}) VULKAN_HPP_NOEXCEPT
  44761. : drmFormatModifierCount( drmFormatModifierCount_ ), pDrmFormatModifierProperties( pDrmFormatModifierProperties_ )
  44762. {}
  44763. VULKAN_HPP_CONSTEXPR DrmFormatModifierPropertiesListEXT( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44764. DrmFormatModifierPropertiesListEXT( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  44765. : DrmFormatModifierPropertiesListEXT( *reinterpret_cast<DrmFormatModifierPropertiesListEXT const *>( &rhs ) )
  44766. {}
  44767. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  44768. DrmFormatModifierPropertiesListEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT> const & drmFormatModifierProperties_ )
  44769. : drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifierProperties_.size() ) ), pDrmFormatModifierProperties( drmFormatModifierProperties_.data() )
  44770. {}
  44771. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  44772. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44773. VULKAN_HPP_CONSTEXPR_14 DrmFormatModifierPropertiesListEXT & operator=( DrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44774. DrmFormatModifierPropertiesListEXT & operator=( VkDrmFormatModifierPropertiesListEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  44775. {
  44776. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT const *>( &rhs );
  44777. return *this;
  44778. }
  44779. operator VkDrmFormatModifierPropertiesListEXT const&() const VULKAN_HPP_NOEXCEPT
  44780. {
  44781. return *reinterpret_cast<const VkDrmFormatModifierPropertiesListEXT*>( this );
  44782. }
  44783. operator VkDrmFormatModifierPropertiesListEXT &() VULKAN_HPP_NOEXCEPT
  44784. {
  44785. return *reinterpret_cast<VkDrmFormatModifierPropertiesListEXT*>( this );
  44786. }
  44787. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  44788. auto operator<=>( DrmFormatModifierPropertiesListEXT const& ) const = default;
  44789. #else
  44790. bool operator==( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  44791. {
  44792. return ( sType == rhs.sType )
  44793. && ( pNext == rhs.pNext )
  44794. && ( drmFormatModifierCount == rhs.drmFormatModifierCount )
  44795. && ( pDrmFormatModifierProperties == rhs.pDrmFormatModifierProperties );
  44796. }
  44797. bool operator!=( DrmFormatModifierPropertiesListEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  44798. {
  44799. return !operator==( rhs );
  44800. }
  44801. #endif
  44802. public:
  44803. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eDrmFormatModifierPropertiesListEXT;
  44804. void* pNext = {};
  44805. uint32_t drmFormatModifierCount = {};
  44806. VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties = {};
  44807. };
  44808. static_assert( sizeof( DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ), "struct and wrapper have different size!" );
  44809. static_assert( std::is_standard_layout<DrmFormatModifierPropertiesListEXT>::value, "struct wrapper is not a standard layout!" );
  44810. template <>
  44811. struct CppType<StructureType, StructureType::eDrmFormatModifierPropertiesListEXT>
  44812. {
  44813. using Type = DrmFormatModifierPropertiesListEXT;
  44814. };
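// Illustrative usage sketch (not part of the generated header): this is a returned-only structure
// (no setters are generated), filled with the usual two-call pattern by chaining it into
// FormatProperties2 when VK_EXT_image_drm_format_modifier is supported.  `physicalDevice` and
// `format` are placeholders.
//
//   vk::DrmFormatModifierPropertiesListEXT modifierList;
//   vk::FormatProperties2 formatProperties2;
//   formatProperties2.pNext = &modifierList;
//   physicalDevice.getFormatProperties2( format, &formatProperties2 );        // first call: get the count
//   std::vector<vk::DrmFormatModifierPropertiesEXT> modifiers( modifierList.drmFormatModifierCount );
//   modifierList.pDrmFormatModifierProperties = modifiers.data();
//   physicalDevice.getFormatProperties2( format, &formatProperties2 );        // second call: fill the properties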
  44815. struct ExportFenceCreateInfo
  44816. {
  44817. static const bool allowDuplicate = false;
  44818. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceCreateInfo;
  44819. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44820. VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo(VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
  44821. : handleTypes( handleTypes_ )
  44822. {}
  44823. VULKAN_HPP_CONSTEXPR ExportFenceCreateInfo( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44824. ExportFenceCreateInfo( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  44825. : ExportFenceCreateInfo( *reinterpret_cast<ExportFenceCreateInfo const *>( &rhs ) )
  44826. {}
  44827. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44828. VULKAN_HPP_CONSTEXPR_14 ExportFenceCreateInfo & operator=( ExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44829. ExportFenceCreateInfo & operator=( VkExportFenceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  44830. {
  44831. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo const *>( &rhs );
  44832. return *this;
  44833. }
  44834. ExportFenceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  44835. {
  44836. pNext = pNext_;
  44837. return *this;
  44838. }
  44839. ExportFenceCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
  44840. {
  44841. handleTypes = handleTypes_;
  44842. return *this;
  44843. }
  44844. operator VkExportFenceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  44845. {
  44846. return *reinterpret_cast<const VkExportFenceCreateInfo*>( this );
  44847. }
  44848. operator VkExportFenceCreateInfo &() VULKAN_HPP_NOEXCEPT
  44849. {
  44850. return *reinterpret_cast<VkExportFenceCreateInfo*>( this );
  44851. }
  44852. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  44853. auto operator<=>( ExportFenceCreateInfo const& ) const = default;
  44854. #else
  44855. bool operator==( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  44856. {
  44857. return ( sType == rhs.sType )
  44858. && ( pNext == rhs.pNext )
  44859. && ( handleTypes == rhs.handleTypes );
  44860. }
  44861. bool operator!=( ExportFenceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  44862. {
  44863. return !operator==( rhs );
  44864. }
  44865. #endif
  44866. public:
  44867. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceCreateInfo;
  44868. const void* pNext = {};
  44869. VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags handleTypes = {};
  44870. };
  44871. static_assert( sizeof( ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ), "struct and wrapper have different size!" );
  44872. static_assert( std::is_standard_layout<ExportFenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
  44873. template <>
  44874. struct CppType<StructureType, StructureType::eExportFenceCreateInfo>
  44875. {
  44876. using Type = ExportFenceCreateInfo;
  44877. };
  44878. using ExportFenceCreateInfoKHR = ExportFenceCreateInfo;
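// Illustrative usage sketch (not part of the generated header): an ExportFenceCreateInfo is
// chained into a FenceCreateInfo so the fence payload can later be exported, here as an opaque
// POSIX fd.  `device` is a placeholder.
//
//   vk::ExportFenceCreateInfo exportInfo( vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd );
//   vk::FenceCreateInfo fenceInfo;
//   fenceInfo.setPNext( &exportInfo );
//   vk::Fence fence = device.createFence( fenceInfo );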
  44879. #ifdef VK_USE_PLATFORM_WIN32_KHR
  44880. struct ExportFenceWin32HandleInfoKHR
  44881. {
  44882. static const bool allowDuplicate = false;
  44883. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportFenceWin32HandleInfoKHR;
  44884. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44885. VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR(const SECURITY_ATTRIBUTES* pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
  44886. : pAttributes( pAttributes_ ), dwAccess( dwAccess_ ), name( name_ )
  44887. {}
  44888. VULKAN_HPP_CONSTEXPR ExportFenceWin32HandleInfoKHR( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44889. ExportFenceWin32HandleInfoKHR( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  44890. : ExportFenceWin32HandleInfoKHR( *reinterpret_cast<ExportFenceWin32HandleInfoKHR const *>( &rhs ) )
  44891. {}
  44892. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44893. VULKAN_HPP_CONSTEXPR_14 ExportFenceWin32HandleInfoKHR & operator=( ExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44894. ExportFenceWin32HandleInfoKHR & operator=( VkExportFenceWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  44895. {
  44896. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR const *>( &rhs );
  44897. return *this;
  44898. }
  44899. ExportFenceWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  44900. {
  44901. pNext = pNext_;
  44902. return *this;
  44903. }
  44904. ExportFenceWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
  44905. {
  44906. pAttributes = pAttributes_;
  44907. return *this;
  44908. }
  44909. ExportFenceWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
  44910. {
  44911. dwAccess = dwAccess_;
  44912. return *this;
  44913. }
  44914. ExportFenceWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
  44915. {
  44916. name = name_;
  44917. return *this;
  44918. }
  44919. operator VkExportFenceWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  44920. {
  44921. return *reinterpret_cast<const VkExportFenceWin32HandleInfoKHR*>( this );
  44922. }
  44923. operator VkExportFenceWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
  44924. {
  44925. return *reinterpret_cast<VkExportFenceWin32HandleInfoKHR*>( this );
  44926. }
  44927. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  44928. auto operator<=>( ExportFenceWin32HandleInfoKHR const& ) const = default;
  44929. #else
  44930. bool operator==( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  44931. {
  44932. return ( sType == rhs.sType )
  44933. && ( pNext == rhs.pNext )
  44934. && ( pAttributes == rhs.pAttributes )
  44935. && ( dwAccess == rhs.dwAccess )
  44936. && ( name == rhs.name );
  44937. }
  44938. bool operator!=( ExportFenceWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  44939. {
  44940. return !operator==( rhs );
  44941. }
  44942. #endif
  44943. public:
  44944. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportFenceWin32HandleInfoKHR;
  44945. const void* pNext = {};
  44946. const SECURITY_ATTRIBUTES* pAttributes = {};
  44947. DWORD dwAccess = {};
  44948. LPCWSTR name = {};
  44949. };
  44950. static_assert( sizeof( ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ), "struct and wrapper have different size!" );
  44951. static_assert( std::is_standard_layout<ExportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
  44952. template <>
  44953. struct CppType<StructureType, StructureType::eExportFenceWin32HandleInfoKHR>
  44954. {
  44955. using Type = ExportFenceWin32HandleInfoKHR;
  44956. };
  44957. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  44958. struct ExportMemoryAllocateInfo
  44959. {
  44960. static const bool allowDuplicate = false;
  44961. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfo;
  44962. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44963. VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
  44964. : handleTypes( handleTypes_ )
  44965. {}
  44966. VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfo( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44967. ExportMemoryAllocateInfo( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  44968. : ExportMemoryAllocateInfo( *reinterpret_cast<ExportMemoryAllocateInfo const *>( &rhs ) )
  44969. {}
  44970. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  44971. VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfo & operator=( ExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  44972. ExportMemoryAllocateInfo & operator=( VkExportMemoryAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  44973. {
  44974. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo const *>( &rhs );
  44975. return *this;
  44976. }
  44977. ExportMemoryAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  44978. {
  44979. pNext = pNext_;
  44980. return *this;
  44981. }
  44982. ExportMemoryAllocateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
  44983. {
  44984. handleTypes = handleTypes_;
  44985. return *this;
  44986. }
  44987. operator VkExportMemoryAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
  44988. {
  44989. return *reinterpret_cast<const VkExportMemoryAllocateInfo*>( this );
  44990. }
  44991. operator VkExportMemoryAllocateInfo &() VULKAN_HPP_NOEXCEPT
  44992. {
  44993. return *reinterpret_cast<VkExportMemoryAllocateInfo*>( this );
  44994. }
  44995. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  44996. auto operator<=>( ExportMemoryAllocateInfo const& ) const = default;
  44997. #else
  44998. bool operator==( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  44999. {
  45000. return ( sType == rhs.sType )
  45001. && ( pNext == rhs.pNext )
  45002. && ( handleTypes == rhs.handleTypes );
  45003. }
  45004. bool operator!=( ExportMemoryAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  45005. {
  45006. return !operator==( rhs );
  45007. }
  45008. #endif
  45009. public:
  45010. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfo;
  45011. const void* pNext = {};
  45012. VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
  45013. };
  45014. static_assert( sizeof( ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ), "struct and wrapper have different size!" );
  45015. static_assert( std::is_standard_layout<ExportMemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
  45016. template <>
  45017. struct CppType<StructureType, StructureType::eExportMemoryAllocateInfo>
  45018. {
  45019. using Type = ExportMemoryAllocateInfo;
  45020. };
  45021. using ExportMemoryAllocateInfoKHR = ExportMemoryAllocateInfo;
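// Illustrative usage sketch (not part of the generated header): to allocate exportable device
// memory, an ExportMemoryAllocateInfo is chained into the MemoryAllocateInfo; the handle type
// must match what the memory will later be exported as.  `allocationSize`, `memoryTypeIndex` and
// `device` are placeholders.
//
//   vk::ExportMemoryAllocateInfo exportAllocInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
//   vk::MemoryAllocateInfo allocInfo( allocationSize, memoryTypeIndex );
//   allocInfo.setPNext( &exportAllocInfo );
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );
//
// Alternatively, vk::StructureChain<vk::MemoryAllocateInfo, vk::ExportMemoryAllocateInfo> builds
// the same pNext chain with compile-time checking of which structures may extend which.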
  45022. struct ExportMemoryAllocateInfoNV
  45023. {
  45024. static const bool allowDuplicate = false;
  45025. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryAllocateInfoNV;
  45026. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45027. VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
  45028. : handleTypes( handleTypes_ )
  45029. {}
  45030. VULKAN_HPP_CONSTEXPR ExportMemoryAllocateInfoNV( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45031. ExportMemoryAllocateInfoNV( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  45032. : ExportMemoryAllocateInfoNV( *reinterpret_cast<ExportMemoryAllocateInfoNV const *>( &rhs ) )
  45033. {}
  45034. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45035. VULKAN_HPP_CONSTEXPR_14 ExportMemoryAllocateInfoNV & operator=( ExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45036. ExportMemoryAllocateInfoNV & operator=( VkExportMemoryAllocateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  45037. {
  45038. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV const *>( &rhs );
  45039. return *this;
  45040. }
  45041. ExportMemoryAllocateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  45042. {
  45043. pNext = pNext_;
  45044. return *this;
  45045. }
  45046. ExportMemoryAllocateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
  45047. {
  45048. handleTypes = handleTypes_;
  45049. return *this;
  45050. }
  45051. operator VkExportMemoryAllocateInfoNV const&() const VULKAN_HPP_NOEXCEPT
  45052. {
  45053. return *reinterpret_cast<const VkExportMemoryAllocateInfoNV*>( this );
  45054. }
  45055. operator VkExportMemoryAllocateInfoNV &() VULKAN_HPP_NOEXCEPT
  45056. {
  45057. return *reinterpret_cast<VkExportMemoryAllocateInfoNV*>( this );
  45058. }
  45059. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  45060. auto operator<=>( ExportMemoryAllocateInfoNV const& ) const = default;
  45061. #else
  45062. bool operator==( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  45063. {
  45064. return ( sType == rhs.sType )
  45065. && ( pNext == rhs.pNext )
  45066. && ( handleTypes == rhs.handleTypes );
  45067. }
  45068. bool operator!=( ExportMemoryAllocateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  45069. {
  45070. return !operator==( rhs );
  45071. }
  45072. #endif
  45073. public:
  45074. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryAllocateInfoNV;
  45075. const void* pNext = {};
  45076. VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
  45077. };
  45078. static_assert( sizeof( ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ), "struct and wrapper have different size!" );
  45079. static_assert( std::is_standard_layout<ExportMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
  45080. template <>
  45081. struct CppType<StructureType, StructureType::eExportMemoryAllocateInfoNV>
  45082. {
  45083. using Type = ExportMemoryAllocateInfoNV;
  45084. };
  45085. #ifdef VK_USE_PLATFORM_WIN32_KHR
  45086. struct ExportMemoryWin32HandleInfoKHR
  45087. {
  45088. static const bool allowDuplicate = false;
  45089. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoKHR;
  45090. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45091. VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR(const SECURITY_ATTRIBUTES* pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
  45092. : pAttributes( pAttributes_ ), dwAccess( dwAccess_ ), name( name_ )
  45093. {}
  45094. VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoKHR( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45095. ExportMemoryWin32HandleInfoKHR( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  45096. : ExportMemoryWin32HandleInfoKHR( *reinterpret_cast<ExportMemoryWin32HandleInfoKHR const *>( &rhs ) )
  45097. {}
  45098. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45099. VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoKHR & operator=( ExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45100. ExportMemoryWin32HandleInfoKHR & operator=( VkExportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  45101. {
  45102. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR const *>( &rhs );
  45103. return *this;
  45104. }
  45105. ExportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  45106. {
  45107. pNext = pNext_;
  45108. return *this;
  45109. }
  45110. ExportMemoryWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
  45111. {
  45112. pAttributes = pAttributes_;
  45113. return *this;
  45114. }
  45115. ExportMemoryWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
  45116. {
  45117. dwAccess = dwAccess_;
  45118. return *this;
  45119. }
  45120. ExportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
  45121. {
  45122. name = name_;
  45123. return *this;
  45124. }
  45125. operator VkExportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  45126. {
  45127. return *reinterpret_cast<const VkExportMemoryWin32HandleInfoKHR*>( this );
  45128. }
  45129. operator VkExportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
  45130. {
  45131. return *reinterpret_cast<VkExportMemoryWin32HandleInfoKHR*>( this );
  45132. }
  45133. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  45134. auto operator<=>( ExportMemoryWin32HandleInfoKHR const& ) const = default;
  45135. #else
  45136. bool operator==( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  45137. {
  45138. return ( sType == rhs.sType )
  45139. && ( pNext == rhs.pNext )
  45140. && ( pAttributes == rhs.pAttributes )
  45141. && ( dwAccess == rhs.dwAccess )
  45142. && ( name == rhs.name );
  45143. }
  45144. bool operator!=( ExportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  45145. {
  45146. return !operator==( rhs );
  45147. }
  45148. #endif
  45149. public:
  45150. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoKHR;
  45151. const void* pNext = {};
  45152. const SECURITY_ATTRIBUTES* pAttributes = {};
  45153. DWORD dwAccess = {};
  45154. LPCWSTR name = {};
  45155. };
  45156. static_assert( sizeof( ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
  45157. static_assert( std::is_standard_layout<ExportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
  45158. template <>
  45159. struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoKHR>
  45160. {
  45161. using Type = ExportMemoryWin32HandleInfoKHR;
  45162. };
  45163. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
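// Illustrative usage sketch (not part of the generated header, Win32 only):
// ExportMemoryWin32HandleInfoKHR also extends MemoryAllocateInfo and supplies the security
// attributes, access mask and name of the NT handle to be exported; all values below (including
// the GENERIC_ALL access mask and the name) are placeholders.
//
//   #ifdef VK_USE_PLATFORM_WIN32_KHR
//   SECURITY_ATTRIBUTES securityAttributes = {};
//   vk::ExportMemoryWin32HandleInfoKHR exportWin32Info( &securityAttributes, GENERIC_ALL, L"ExampleSharedMemory" );
//   vk::ExportMemoryAllocateInfo exportAllocInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 );
//   exportAllocInfo.setPNext( &exportWin32Info );
//   allocInfo.setPNext( &exportAllocInfo );   // allocInfo as in the ExportMemoryAllocateInfo sketch above
//   #endif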
  45164. #ifdef VK_USE_PLATFORM_WIN32_KHR
  45165. struct ExportMemoryWin32HandleInfoNV
  45166. {
  45167. static const bool allowDuplicate = false;
  45168. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportMemoryWin32HandleInfoNV;
  45169. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45170. VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV(const SECURITY_ATTRIBUTES* pAttributes_ = {}, DWORD dwAccess_ = {}) VULKAN_HPP_NOEXCEPT
  45171. : pAttributes( pAttributes_ ), dwAccess( dwAccess_ )
  45172. {}
  45173. VULKAN_HPP_CONSTEXPR ExportMemoryWin32HandleInfoNV( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45174. ExportMemoryWin32HandleInfoNV( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  45175. : ExportMemoryWin32HandleInfoNV( *reinterpret_cast<ExportMemoryWin32HandleInfoNV const *>( &rhs ) )
  45176. {}
  45177. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45178. VULKAN_HPP_CONSTEXPR_14 ExportMemoryWin32HandleInfoNV & operator=( ExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45179. ExportMemoryWin32HandleInfoNV & operator=( VkExportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  45180. {
  45181. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV const *>( &rhs );
  45182. return *this;
  45183. }
  45184. ExportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  45185. {
  45186. pNext = pNext_;
  45187. return *this;
  45188. }
  45189. ExportMemoryWin32HandleInfoNV & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
  45190. {
  45191. pAttributes = pAttributes_;
  45192. return *this;
  45193. }
  45194. ExportMemoryWin32HandleInfoNV & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
  45195. {
  45196. dwAccess = dwAccess_;
  45197. return *this;
  45198. }
  45199. operator VkExportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT
  45200. {
  45201. return *reinterpret_cast<const VkExportMemoryWin32HandleInfoNV*>( this );
  45202. }
  45203. operator VkExportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
  45204. {
  45205. return *reinterpret_cast<VkExportMemoryWin32HandleInfoNV*>( this );
  45206. }
  45207. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  45208. auto operator<=>( ExportMemoryWin32HandleInfoNV const& ) const = default;
  45209. #else
  45210. bool operator==( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  45211. {
  45212. return ( sType == rhs.sType )
  45213. && ( pNext == rhs.pNext )
  45214. && ( pAttributes == rhs.pAttributes )
  45215. && ( dwAccess == rhs.dwAccess );
  45216. }
  45217. bool operator!=( ExportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  45218. {
  45219. return !operator==( rhs );
  45220. }
  45221. #endif
  45222. public:
  45223. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportMemoryWin32HandleInfoNV;
  45224. const void* pNext = {};
  45225. const SECURITY_ATTRIBUTES* pAttributes = {};
  45226. DWORD dwAccess = {};
  45227. };
  45228. static_assert( sizeof( ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
  45229. static_assert( std::is_standard_layout<ExportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
  45230. template <>
  45231. struct CppType<StructureType, StructureType::eExportMemoryWin32HandleInfoNV>
  45232. {
  45233. using Type = ExportMemoryWin32HandleInfoNV;
  45234. };
  45235. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  45236. struct ExportSemaphoreCreateInfo
  45237. {
  45238. static const bool allowDuplicate = false;
  45239. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreCreateInfo;
  45240. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45241. VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo(VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
  45242. : handleTypes( handleTypes_ )
  45243. {}
  45244. VULKAN_HPP_CONSTEXPR ExportSemaphoreCreateInfo( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45245. ExportSemaphoreCreateInfo( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  45246. : ExportSemaphoreCreateInfo( *reinterpret_cast<ExportSemaphoreCreateInfo const *>( &rhs ) )
  45247. {}
  45248. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45249. VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreCreateInfo & operator=( ExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45250. ExportSemaphoreCreateInfo & operator=( VkExportSemaphoreCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  45251. {
  45252. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo const *>( &rhs );
  45253. return *this;
  45254. }
  45255. ExportSemaphoreCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  45256. {
  45257. pNext = pNext_;
  45258. return *this;
  45259. }
  45260. ExportSemaphoreCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
  45261. {
  45262. handleTypes = handleTypes_;
  45263. return *this;
  45264. }
  45265. operator VkExportSemaphoreCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  45266. {
  45267. return *reinterpret_cast<const VkExportSemaphoreCreateInfo*>( this );
  45268. }
  45269. operator VkExportSemaphoreCreateInfo &() VULKAN_HPP_NOEXCEPT
  45270. {
  45271. return *reinterpret_cast<VkExportSemaphoreCreateInfo*>( this );
  45272. }
  45273. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  45274. auto operator<=>( ExportSemaphoreCreateInfo const& ) const = default;
  45275. #else
  45276. bool operator==( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  45277. {
  45278. return ( sType == rhs.sType )
  45279. && ( pNext == rhs.pNext )
  45280. && ( handleTypes == rhs.handleTypes );
  45281. }
  45282. bool operator!=( ExportSemaphoreCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  45283. {
  45284. return !operator==( rhs );
  45285. }
  45286. #endif
  45287. public:
  45288. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreCreateInfo;
  45289. const void* pNext = {};
  45290. VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags handleTypes = {};
  45291. };
  45292. static_assert( sizeof( ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ), "struct and wrapper have different size!" );
  45293. static_assert( std::is_standard_layout<ExportSemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
  45294. template <>
  45295. struct CppType<StructureType, StructureType::eExportSemaphoreCreateInfo>
  45296. {
  45297. using Type = ExportSemaphoreCreateInfo;
  45298. };
  45299. using ExportSemaphoreCreateInfoKHR = ExportSemaphoreCreateInfo;
  45300. #ifdef VK_USE_PLATFORM_WIN32_KHR
  45301. struct ExportSemaphoreWin32HandleInfoKHR
  45302. {
  45303. static const bool allowDuplicate = false;
  45304. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
  45305. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45306. VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR(const SECURITY_ATTRIBUTES* pAttributes_ = {}, DWORD dwAccess_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
  45307. : pAttributes( pAttributes_ ), dwAccess( dwAccess_ ), name( name_ )
  45308. {}
  45309. VULKAN_HPP_CONSTEXPR ExportSemaphoreWin32HandleInfoKHR( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45310. ExportSemaphoreWin32HandleInfoKHR( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  45311. : ExportSemaphoreWin32HandleInfoKHR( *reinterpret_cast<ExportSemaphoreWin32HandleInfoKHR const *>( &rhs ) )
  45312. {}
  45313. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45314. VULKAN_HPP_CONSTEXPR_14 ExportSemaphoreWin32HandleInfoKHR & operator=( ExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45315. ExportSemaphoreWin32HandleInfoKHR & operator=( VkExportSemaphoreWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  45316. {
  45317. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR const *>( &rhs );
  45318. return *this;
  45319. }
  45320. ExportSemaphoreWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  45321. {
  45322. pNext = pNext_;
  45323. return *this;
  45324. }
  45325. ExportSemaphoreWin32HandleInfoKHR & setPAttributes( const SECURITY_ATTRIBUTES* pAttributes_ ) VULKAN_HPP_NOEXCEPT
  45326. {
  45327. pAttributes = pAttributes_;
  45328. return *this;
  45329. }
  45330. ExportSemaphoreWin32HandleInfoKHR & setDwAccess( DWORD dwAccess_ ) VULKAN_HPP_NOEXCEPT
  45331. {
  45332. dwAccess = dwAccess_;
  45333. return *this;
  45334. }
  45335. ExportSemaphoreWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
  45336. {
  45337. name = name_;
  45338. return *this;
  45339. }
  45340. operator VkExportSemaphoreWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  45341. {
  45342. return *reinterpret_cast<const VkExportSemaphoreWin32HandleInfoKHR*>( this );
  45343. }
  45344. operator VkExportSemaphoreWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
  45345. {
  45346. return *reinterpret_cast<VkExportSemaphoreWin32HandleInfoKHR*>( this );
  45347. }
  45348. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  45349. auto operator<=>( ExportSemaphoreWin32HandleInfoKHR const& ) const = default;
  45350. #else
  45351. bool operator==( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  45352. {
  45353. return ( sType == rhs.sType )
  45354. && ( pNext == rhs.pNext )
  45355. && ( pAttributes == rhs.pAttributes )
  45356. && ( dwAccess == rhs.dwAccess )
  45357. && ( name == rhs.name );
  45358. }
  45359. bool operator!=( ExportSemaphoreWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  45360. {
  45361. return !operator==( rhs );
  45362. }
  45363. #endif
  45364. public:
  45365. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExportSemaphoreWin32HandleInfoKHR;
  45366. const void* pNext = {};
  45367. const SECURITY_ATTRIBUTES* pAttributes = {};
  45368. DWORD dwAccess = {};
  45369. LPCWSTR name = {};
  45370. };
  45371. static_assert( sizeof( ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ), "struct and wrapper have different size!" );
  45372. static_assert( std::is_standard_layout<ExportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
  45373. template <>
  45374. struct CppType<StructureType, StructureType::eExportSemaphoreWin32HandleInfoKHR>
  45375. {
  45376. using Type = ExportSemaphoreWin32HandleInfoKHR;
  45377. };
  45378. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  45379. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  45380. struct ExternalFormatANDROID
  45381. {
  45382. static const bool allowDuplicate = false;
  45383. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalFormatANDROID;
  45384. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45385. VULKAN_HPP_CONSTEXPR ExternalFormatANDROID(uint64_t externalFormat_ = {}) VULKAN_HPP_NOEXCEPT
  45386. : externalFormat( externalFormat_ )
  45387. {}
  45388. VULKAN_HPP_CONSTEXPR ExternalFormatANDROID( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45389. ExternalFormatANDROID( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
  45390. : ExternalFormatANDROID( *reinterpret_cast<ExternalFormatANDROID const *>( &rhs ) )
  45391. {}
  45392. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45393. VULKAN_HPP_CONSTEXPR_14 ExternalFormatANDROID & operator=( ExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45394. ExternalFormatANDROID & operator=( VkExternalFormatANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
  45395. {
  45396. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID const *>( &rhs );
  45397. return *this;
  45398. }
  45399. ExternalFormatANDROID & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  45400. {
  45401. pNext = pNext_;
  45402. return *this;
  45403. }
  45404. ExternalFormatANDROID & setExternalFormat( uint64_t externalFormat_ ) VULKAN_HPP_NOEXCEPT
  45405. {
  45406. externalFormat = externalFormat_;
  45407. return *this;
  45408. }
  45409. operator VkExternalFormatANDROID const&() const VULKAN_HPP_NOEXCEPT
  45410. {
  45411. return *reinterpret_cast<const VkExternalFormatANDROID*>( this );
  45412. }
  45413. operator VkExternalFormatANDROID &() VULKAN_HPP_NOEXCEPT
  45414. {
  45415. return *reinterpret_cast<VkExternalFormatANDROID*>( this );
  45416. }
  45417. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  45418. auto operator<=>( ExternalFormatANDROID const& ) const = default;
  45419. #else
  45420. bool operator==( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
  45421. {
  45422. return ( sType == rhs.sType )
  45423. && ( pNext == rhs.pNext )
  45424. && ( externalFormat == rhs.externalFormat );
  45425. }
  45426. bool operator!=( ExternalFormatANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
  45427. {
  45428. return !operator==( rhs );
  45429. }
  45430. #endif
  45431. public:
  45432. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalFormatANDROID;
  45433. void* pNext = {};
  45434. uint64_t externalFormat = {};
  45435. };
  45436. static_assert( sizeof( ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ), "struct and wrapper have different size!" );
  45437. static_assert( std::is_standard_layout<ExternalFormatANDROID>::value, "struct wrapper is not a standard layout!" );
  45438. template <>
  45439. struct CppType<StructureType, StructureType::eExternalFormatANDROID>
  45440. {
  45441. using Type = ExternalFormatANDROID;
  45442. };
  45443. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
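// Illustrative usage sketch (not part of the generated header, Android only): when importing an
// AHardwareBuffer whose format has no Vulkan equivalent, the implementation-defined external
// format (obtained elsewhere, e.g. from AndroidHardwareBufferFormatPropertiesANDROID::externalFormat)
// is chained into an ImageCreateInfo whose format is eUndefined.  `externalFormatValue` is a
// placeholder.
//
//   vk::ExternalFormatANDROID externalFormat;
//   externalFormat.setExternalFormat( externalFormatValue );
//   vk::ImageCreateInfo imageCreateInfo;
//   imageCreateInfo.setFormat( vk::Format::eUndefined )
//                  .setPNext( &externalFormat );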
  45444. struct ExternalImageFormatProperties
  45445. {
  45446. static const bool allowDuplicate = false;
  45447. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalImageFormatProperties;
  45448. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45449. VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties(VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties_ = {}) VULKAN_HPP_NOEXCEPT
  45450. : externalMemoryProperties( externalMemoryProperties_ )
  45451. {}
  45452. VULKAN_HPP_CONSTEXPR ExternalImageFormatProperties( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45453. ExternalImageFormatProperties( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  45454. : ExternalImageFormatProperties( *reinterpret_cast<ExternalImageFormatProperties const *>( &rhs ) )
  45455. {}
  45456. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45457. VULKAN_HPP_CONSTEXPR_14 ExternalImageFormatProperties & operator=( ExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45458. ExternalImageFormatProperties & operator=( VkExternalImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  45459. {
  45460. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties const *>( &rhs );
  45461. return *this;
  45462. }
  45463. operator VkExternalImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
  45464. {
  45465. return *reinterpret_cast<const VkExternalImageFormatProperties*>( this );
  45466. }
  45467. operator VkExternalImageFormatProperties &() VULKAN_HPP_NOEXCEPT
  45468. {
  45469. return *reinterpret_cast<VkExternalImageFormatProperties*>( this );
  45470. }
  45471. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  45472. auto operator<=>( ExternalImageFormatProperties const& ) const = default;
  45473. #else
  45474. bool operator==( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  45475. {
  45476. return ( sType == rhs.sType )
  45477. && ( pNext == rhs.pNext )
  45478. && ( externalMemoryProperties == rhs.externalMemoryProperties );
  45479. }
  45480. bool operator!=( ExternalImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  45481. {
  45482. return !operator==( rhs );
  45483. }
  45484. #endif
  45485. public:
  45486. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalImageFormatProperties;
  45487. void* pNext = {};
  45488. VULKAN_HPP_NAMESPACE::ExternalMemoryProperties externalMemoryProperties = {};
  45489. };
  45490. static_assert( sizeof( ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ), "struct and wrapper have different size!" );
  45491. static_assert( std::is_standard_layout<ExternalImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
  45492. template <>
  45493. struct CppType<StructureType, StructureType::eExternalImageFormatProperties>
  45494. {
  45495. using Type = ExternalImageFormatProperties;
  45496. };
  45497. using ExternalImageFormatPropertiesKHR = ExternalImageFormatProperties;
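// Illustrative usage sketch (not part of the generated header): this returned-only structure is
// chained into ImageFormatProperties2 while a PhysicalDeviceExternalImageFormatInfo is chained
// into the query input, so the reported external-memory capabilities match the requested handle
// type.  `physicalDevice`, `format` and `usage` are placeholders.
//
//   vk::PhysicalDeviceExternalImageFormatInfo externalInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
//   vk::PhysicalDeviceImageFormatInfo2 imageFormatInfo( format, vk::ImageType::e2D, vk::ImageTiling::eOptimal, usage );
//   imageFormatInfo.setPNext( &externalInfo );
//   vk::ExternalImageFormatProperties externalProperties;
//   vk::ImageFormatProperties2 imageFormatProperties;
//   imageFormatProperties.pNext = &externalProperties;
//   vk::Result result = physicalDevice.getImageFormatProperties2( &imageFormatInfo, &imageFormatProperties );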
  45498. struct ExternalMemoryBufferCreateInfo
  45499. {
  45500. static const bool allowDuplicate = false;
  45501. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryBufferCreateInfo;
  45502. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45503. VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
  45504. : handleTypes( handleTypes_ )
  45505. {}
  45506. VULKAN_HPP_CONSTEXPR ExternalMemoryBufferCreateInfo( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45507. ExternalMemoryBufferCreateInfo( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  45508. : ExternalMemoryBufferCreateInfo( *reinterpret_cast<ExternalMemoryBufferCreateInfo const *>( &rhs ) )
  45509. {}
  45510. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45511. VULKAN_HPP_CONSTEXPR_14 ExternalMemoryBufferCreateInfo & operator=( ExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45512. ExternalMemoryBufferCreateInfo & operator=( VkExternalMemoryBufferCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  45513. {
  45514. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo const *>( &rhs );
  45515. return *this;
  45516. }
  45517. ExternalMemoryBufferCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  45518. {
  45519. pNext = pNext_;
  45520. return *this;
  45521. }
  45522. ExternalMemoryBufferCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
  45523. {
  45524. handleTypes = handleTypes_;
  45525. return *this;
  45526. }
  45527. operator VkExternalMemoryBufferCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  45528. {
  45529. return *reinterpret_cast<const VkExternalMemoryBufferCreateInfo*>( this );
  45530. }
  45531. operator VkExternalMemoryBufferCreateInfo &() VULKAN_HPP_NOEXCEPT
  45532. {
  45533. return *reinterpret_cast<VkExternalMemoryBufferCreateInfo*>( this );
  45534. }
  45535. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  45536. auto operator<=>( ExternalMemoryBufferCreateInfo const& ) const = default;
  45537. #else
  45538. bool operator==( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  45539. {
  45540. return ( sType == rhs.sType )
  45541. && ( pNext == rhs.pNext )
  45542. && ( handleTypes == rhs.handleTypes );
  45543. }
  45544. bool operator!=( ExternalMemoryBufferCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  45545. {
  45546. return !operator==( rhs );
  45547. }
  45548. #endif
  45549. public:
  45550. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryBufferCreateInfo;
  45551. const void* pNext = {};
  45552. VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
  45553. };
  45554. static_assert( sizeof( ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ), "struct and wrapper have different size!" );
  45555. static_assert( std::is_standard_layout<ExternalMemoryBufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
  45556. template <>
  45557. struct CppType<StructureType, StructureType::eExternalMemoryBufferCreateInfo>
  45558. {
  45559. using Type = ExternalMemoryBufferCreateInfo;
  45560. };
  45561. using ExternalMemoryBufferCreateInfoKHR = ExternalMemoryBufferCreateInfo;
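// Illustrative usage sketch, not part of the generated header: ExternalMemoryBufferCreateInfo is
// chained into BufferCreateInfo::pNext so that the buffer's device memory can later be exported.
// The names `device` and `size` and the opaque-FD handle type are assumptions for the example.
//
//   vk::ExternalMemoryBufferCreateInfo externalInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
//   vk::BufferCreateInfo bufferInfo( {}, size, vk::BufferUsageFlagBits::eTransferSrc );
//   bufferInfo.pNext  = &externalInfo;
//   vk::Buffer buffer = device.createBuffer( bufferInfo );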
  45562. struct ExternalMemoryImageCreateInfo
  45563. {
  45564. static const bool allowDuplicate = false;
  45565. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfo;
  45566. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45567. VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
  45568. : handleTypes( handleTypes_ )
  45569. {}
  45570. VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfo( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45571. ExternalMemoryImageCreateInfo( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  45572. : ExternalMemoryImageCreateInfo( *reinterpret_cast<ExternalMemoryImageCreateInfo const *>( &rhs ) )
  45573. {}
  45574. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45575. VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfo & operator=( ExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45576. ExternalMemoryImageCreateInfo & operator=( VkExternalMemoryImageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  45577. {
  45578. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo const *>( &rhs );
  45579. return *this;
  45580. }
  45581. ExternalMemoryImageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  45582. {
  45583. pNext = pNext_;
  45584. return *this;
  45585. }
  45586. ExternalMemoryImageCreateInfo & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes_ ) VULKAN_HPP_NOEXCEPT
  45587. {
  45588. handleTypes = handleTypes_;
  45589. return *this;
  45590. }
  45591. operator VkExternalMemoryImageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  45592. {
  45593. return *reinterpret_cast<const VkExternalMemoryImageCreateInfo*>( this );
  45594. }
  45595. operator VkExternalMemoryImageCreateInfo &() VULKAN_HPP_NOEXCEPT
  45596. {
  45597. return *reinterpret_cast<VkExternalMemoryImageCreateInfo*>( this );
  45598. }
  45599. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  45600. auto operator<=>( ExternalMemoryImageCreateInfo const& ) const = default;
  45601. #else
  45602. bool operator==( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  45603. {
  45604. return ( sType == rhs.sType )
  45605. && ( pNext == rhs.pNext )
  45606. && ( handleTypes == rhs.handleTypes );
  45607. }
  45608. bool operator!=( ExternalMemoryImageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  45609. {
  45610. return !operator==( rhs );
  45611. }
  45612. #endif
  45613. public:
  45614. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfo;
  45615. const void* pNext = {};
  45616. VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags handleTypes = {};
  45617. };
  45618. static_assert( sizeof( ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ), "struct and wrapper have different size!" );
  45619. static_assert( std::is_standard_layout<ExternalMemoryImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
  45620. template <>
  45621. struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfo>
  45622. {
  45623. using Type = ExternalMemoryImageCreateInfo;
  45624. };
  45625. using ExternalMemoryImageCreateInfoKHR = ExternalMemoryImageCreateInfo;
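// Illustrative usage sketch: the image-side counterpart of ExternalMemoryBufferCreateInfo; chain it
// into ImageCreateInfo::pNext before creating an image whose memory will be exported. `imageInfo` is
// an assumed, already-filled vk::ImageCreateInfo and `device` an existing vk::Device.
//
//   vk::ExternalMemoryImageCreateInfo externalInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
//   imageInfo.pNext = &externalInfo;
//   vk::Image image = device.createImage( imageInfo );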
  45626. struct ExternalMemoryImageCreateInfoNV
  45627. {
  45628. static const bool allowDuplicate = false;
  45629. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eExternalMemoryImageCreateInfoNV;
  45630. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45631. VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ = {}) VULKAN_HPP_NOEXCEPT
  45632. : handleTypes( handleTypes_ )
  45633. {}
  45634. VULKAN_HPP_CONSTEXPR ExternalMemoryImageCreateInfoNV( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45635. ExternalMemoryImageCreateInfoNV( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  45636. : ExternalMemoryImageCreateInfoNV( *reinterpret_cast<ExternalMemoryImageCreateInfoNV const *>( &rhs ) )
  45637. {}
  45638. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45639. VULKAN_HPP_CONSTEXPR_14 ExternalMemoryImageCreateInfoNV & operator=( ExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45640. ExternalMemoryImageCreateInfoNV & operator=( VkExternalMemoryImageCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  45641. {
  45642. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV const *>( &rhs );
  45643. return *this;
  45644. }
  45645. ExternalMemoryImageCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  45646. {
  45647. pNext = pNext_;
  45648. return *this;
  45649. }
  45650. ExternalMemoryImageCreateInfoNV & setHandleTypes( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes_ ) VULKAN_HPP_NOEXCEPT
  45651. {
  45652. handleTypes = handleTypes_;
  45653. return *this;
  45654. }
  45655. operator VkExternalMemoryImageCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
  45656. {
  45657. return *reinterpret_cast<const VkExternalMemoryImageCreateInfoNV*>( this );
  45658. }
  45659. operator VkExternalMemoryImageCreateInfoNV &() VULKAN_HPP_NOEXCEPT
  45660. {
  45661. return *reinterpret_cast<VkExternalMemoryImageCreateInfoNV*>( this );
  45662. }
  45663. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  45664. auto operator<=>( ExternalMemoryImageCreateInfoNV const& ) const = default;
  45665. #else
  45666. bool operator==( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  45667. {
  45668. return ( sType == rhs.sType )
  45669. && ( pNext == rhs.pNext )
  45670. && ( handleTypes == rhs.handleTypes );
  45671. }
  45672. bool operator!=( ExternalMemoryImageCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  45673. {
  45674. return !operator==( rhs );
  45675. }
  45676. #endif
  45677. public:
  45678. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eExternalMemoryImageCreateInfoNV;
  45679. const void* pNext = {};
  45680. VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleTypes = {};
  45681. };
  45682. static_assert( sizeof( ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ), "struct and wrapper have different size!" );
  45683. static_assert( std::is_standard_layout<ExternalMemoryImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  45684. template <>
  45685. struct CppType<StructureType, StructureType::eExternalMemoryImageCreateInfoNV>
  45686. {
  45687. using Type = ExternalMemoryImageCreateInfoNV;
  45688. };
  45689. struct FilterCubicImageViewImageFormatPropertiesEXT
  45690. {
  45691. static const bool allowDuplicate = false;
  45692. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
  45693. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45694. VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 filterCubic_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax_ = {}) VULKAN_HPP_NOEXCEPT
  45695. : filterCubic( filterCubic_ ), filterCubicMinmax( filterCubicMinmax_ )
  45696. {}
  45697. VULKAN_HPP_CONSTEXPR FilterCubicImageViewImageFormatPropertiesEXT( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45698. FilterCubicImageViewImageFormatPropertiesEXT( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  45699. : FilterCubicImageViewImageFormatPropertiesEXT( *reinterpret_cast<FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs ) )
  45700. {}
  45701. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45702. VULKAN_HPP_CONSTEXPR_14 FilterCubicImageViewImageFormatPropertiesEXT & operator=( FilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45703. FilterCubicImageViewImageFormatPropertiesEXT & operator=( VkFilterCubicImageViewImageFormatPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  45704. {
  45705. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT const *>( &rhs );
  45706. return *this;
  45707. }
  45708. operator VkFilterCubicImageViewImageFormatPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
  45709. {
  45710. return *reinterpret_cast<const VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
  45711. }
  45712. operator VkFilterCubicImageViewImageFormatPropertiesEXT &() VULKAN_HPP_NOEXCEPT
  45713. {
  45714. return *reinterpret_cast<VkFilterCubicImageViewImageFormatPropertiesEXT*>( this );
  45715. }
  45716. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  45717. auto operator<=>( FilterCubicImageViewImageFormatPropertiesEXT const& ) const = default;
  45718. #else
  45719. bool operator==( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  45720. {
  45721. return ( sType == rhs.sType )
  45722. && ( pNext == rhs.pNext )
  45723. && ( filterCubic == rhs.filterCubic )
  45724. && ( filterCubicMinmax == rhs.filterCubicMinmax );
  45725. }
  45726. bool operator!=( FilterCubicImageViewImageFormatPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  45727. {
  45728. return !operator==( rhs );
  45729. }
  45730. #endif
  45731. public:
  45732. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFilterCubicImageViewImageFormatPropertiesEXT;
  45733. void* pNext = {};
  45734. VULKAN_HPP_NAMESPACE::Bool32 filterCubic = {};
  45735. VULKAN_HPP_NAMESPACE::Bool32 filterCubicMinmax = {};
  45736. };
  45737. static_assert( sizeof( FilterCubicImageViewImageFormatPropertiesEXT ) == sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ), "struct and wrapper have different size!" );
  45738. static_assert( std::is_standard_layout<FilterCubicImageViewImageFormatPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  45739. template <>
  45740. struct CppType<StructureType, StructureType::eFilterCubicImageViewImageFormatPropertiesEXT>
  45741. {
  45742. using Type = FilterCubicImageViewImageFormatPropertiesEXT;
  45743. };
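// Illustrative usage sketch: an output structure of VK_EXT_filter_cubic. When the input chain of an
// image format query contains a PhysicalDeviceImageViewImageFormatInfoEXT, adding this structure to
// the result chain reports whether cubic filtering (and cubic min/max reduction) is supported for
// that view type. `formatInfo` is assumed to already chain such an input structure.
//
//   auto chain = physicalDevice.getImageFormatProperties2< vk::ImageFormatProperties2,
//                                                          vk::FilterCubicImageViewImageFormatPropertiesEXT >( formatInfo );
//   vk::Bool32 cubicSupported = chain.get<vk::FilterCubicImageViewImageFormatPropertiesEXT>().filterCubic;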
  45744. struct FragmentShadingRateAttachmentInfoKHR
  45745. {
  45746. static const bool allowDuplicate = false;
  45747. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFragmentShadingRateAttachmentInfoKHR;
  45748. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45749. VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR(const VULKAN_HPP_NAMESPACE::AttachmentReference2* pFragmentShadingRateAttachment_ = {}, VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize_ = {}) VULKAN_HPP_NOEXCEPT
  45750. : pFragmentShadingRateAttachment( pFragmentShadingRateAttachment_ ), shadingRateAttachmentTexelSize( shadingRateAttachmentTexelSize_ )
  45751. {}
  45752. VULKAN_HPP_CONSTEXPR FragmentShadingRateAttachmentInfoKHR( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45753. FragmentShadingRateAttachmentInfoKHR( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  45754. : FragmentShadingRateAttachmentInfoKHR( *reinterpret_cast<FragmentShadingRateAttachmentInfoKHR const *>( &rhs ) )
  45755. {}
  45756. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45757. VULKAN_HPP_CONSTEXPR_14 FragmentShadingRateAttachmentInfoKHR & operator=( FragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45758. FragmentShadingRateAttachmentInfoKHR & operator=( VkFragmentShadingRateAttachmentInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  45759. {
  45760. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR const *>( &rhs );
  45761. return *this;
  45762. }
  45763. FragmentShadingRateAttachmentInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  45764. {
  45765. pNext = pNext_;
  45766. return *this;
  45767. }
  45768. FragmentShadingRateAttachmentInfoKHR & setPFragmentShadingRateAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pFragmentShadingRateAttachment_ ) VULKAN_HPP_NOEXCEPT
  45769. {
  45770. pFragmentShadingRateAttachment = pFragmentShadingRateAttachment_;
  45771. return *this;
  45772. }
  45773. FragmentShadingRateAttachmentInfoKHR & setShadingRateAttachmentTexelSize( VULKAN_HPP_NAMESPACE::Extent2D const & shadingRateAttachmentTexelSize_ ) VULKAN_HPP_NOEXCEPT
  45774. {
  45775. shadingRateAttachmentTexelSize = shadingRateAttachmentTexelSize_;
  45776. return *this;
  45777. }
  45778. operator VkFragmentShadingRateAttachmentInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  45779. {
  45780. return *reinterpret_cast<const VkFragmentShadingRateAttachmentInfoKHR*>( this );
  45781. }
  45782. operator VkFragmentShadingRateAttachmentInfoKHR &() VULKAN_HPP_NOEXCEPT
  45783. {
  45784. return *reinterpret_cast<VkFragmentShadingRateAttachmentInfoKHR*>( this );
  45785. }
  45786. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  45787. auto operator<=>( FragmentShadingRateAttachmentInfoKHR const& ) const = default;
  45788. #else
  45789. bool operator==( FragmentShadingRateAttachmentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  45790. {
  45791. return ( sType == rhs.sType )
  45792. && ( pNext == rhs.pNext )
  45793. && ( pFragmentShadingRateAttachment == rhs.pFragmentShadingRateAttachment )
  45794. && ( shadingRateAttachmentTexelSize == rhs.shadingRateAttachmentTexelSize );
  45795. }
  45796. bool operator!=( FragmentShadingRateAttachmentInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  45797. {
  45798. return !operator==( rhs );
  45799. }
  45800. #endif
  45801. public:
  45802. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFragmentShadingRateAttachmentInfoKHR;
  45803. const void* pNext = {};
  45804. const VULKAN_HPP_NAMESPACE::AttachmentReference2* pFragmentShadingRateAttachment = {};
  45805. VULKAN_HPP_NAMESPACE::Extent2D shadingRateAttachmentTexelSize = {};
  45806. };
  45807. static_assert( sizeof( FragmentShadingRateAttachmentInfoKHR ) == sizeof( VkFragmentShadingRateAttachmentInfoKHR ), "struct and wrapper have different size!" );
  45808. static_assert( std::is_standard_layout<FragmentShadingRateAttachmentInfoKHR>::value, "struct wrapper is not a standard layout!" );
  45809. template <>
  45810. struct CppType<StructureType, StructureType::eFragmentShadingRateAttachmentInfoKHR>
  45811. {
  45812. using Type = FragmentShadingRateAttachmentInfoKHR;
  45813. };
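// Illustrative usage sketch: the per-subpass attachment description of VK_KHR_fragment_shading_rate.
// It is chained into SubpassDescription2::pNext when the render pass is built through
// RenderPassCreateInfo2. `subpass` and attachment index 1 are assumptions for the example; the texel
// size must be one of the sizes advertised by the implementation.
//
//   vk::AttachmentReference2 fsrReference( 1, vk::ImageLayout::eFragmentShadingRateAttachmentOptimalKHR );
//   vk::FragmentShadingRateAttachmentInfoKHR fsrInfo( &fsrReference, vk::Extent2D{ 16, 16 } );
//   subpass.pNext = &fsrInfo;   // subpass is a vk::SubpassDescription2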
  45814. struct FramebufferAttachmentImageInfo
  45815. {
  45816. static const bool allowDuplicate = false;
  45817. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentImageInfo;
  45818. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45819. VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo(VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ = {}, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}, uint32_t width_ = {}, uint32_t height_ = {}, uint32_t layerCount_ = {}, uint32_t viewFormatCount_ = {}, const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {}) VULKAN_HPP_NOEXCEPT
  45820. : flags( flags_ ), usage( usage_ ), width( width_ ), height( height_ ), layerCount( layerCount_ ), viewFormatCount( viewFormatCount_ ), pViewFormats( pViewFormats_ )
  45821. {}
  45822. VULKAN_HPP_CONSTEXPR FramebufferAttachmentImageInfo( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45823. FramebufferAttachmentImageInfo( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  45824. : FramebufferAttachmentImageInfo( *reinterpret_cast<FramebufferAttachmentImageInfo const *>( &rhs ) )
  45825. {}
  45826. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  45827. FramebufferAttachmentImageInfo( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_, uint32_t width_, uint32_t height_, uint32_t layerCount_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ )
  45828. : flags( flags_ ), usage( usage_ ), width( width_ ), height( height_ ), layerCount( layerCount_ ), viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() )
  45829. {}
  45830. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  45831. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45832. VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentImageInfo & operator=( FramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45833. FramebufferAttachmentImageInfo & operator=( VkFramebufferAttachmentImageInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  45834. {
  45835. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo const *>( &rhs );
  45836. return *this;
  45837. }
  45838. FramebufferAttachmentImageInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  45839. {
  45840. pNext = pNext_;
  45841. return *this;
  45842. }
  45843. FramebufferAttachmentImageInfo & setFlags( VULKAN_HPP_NAMESPACE::ImageCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  45844. {
  45845. flags = flags_;
  45846. return *this;
  45847. }
  45848. FramebufferAttachmentImageInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
  45849. {
  45850. usage = usage_;
  45851. return *this;
  45852. }
  45853. FramebufferAttachmentImageInfo & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
  45854. {
  45855. width = width_;
  45856. return *this;
  45857. }
  45858. FramebufferAttachmentImageInfo & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
  45859. {
  45860. height = height_;
  45861. return *this;
  45862. }
  45863. FramebufferAttachmentImageInfo & setLayerCount( uint32_t layerCount_ ) VULKAN_HPP_NOEXCEPT
  45864. {
  45865. layerCount = layerCount_;
  45866. return *this;
  45867. }
  45868. FramebufferAttachmentImageInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
  45869. {
  45870. viewFormatCount = viewFormatCount_;
  45871. return *this;
  45872. }
  45873. FramebufferAttachmentImageInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT
  45874. {
  45875. pViewFormats = pViewFormats_;
  45876. return *this;
  45877. }
  45878. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  45879. FramebufferAttachmentImageInfo & setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ ) VULKAN_HPP_NOEXCEPT
  45880. {
  45881. viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
  45882. pViewFormats = viewFormats_.data();
  45883. return *this;
  45884. }
  45885. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  45886. operator VkFramebufferAttachmentImageInfo const&() const VULKAN_HPP_NOEXCEPT
  45887. {
  45888. return *reinterpret_cast<const VkFramebufferAttachmentImageInfo*>( this );
  45889. }
  45890. operator VkFramebufferAttachmentImageInfo &() VULKAN_HPP_NOEXCEPT
  45891. {
  45892. return *reinterpret_cast<VkFramebufferAttachmentImageInfo*>( this );
  45893. }
  45894. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  45895. auto operator<=>( FramebufferAttachmentImageInfo const& ) const = default;
  45896. #else
  45897. bool operator==( FramebufferAttachmentImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  45898. {
  45899. return ( sType == rhs.sType )
  45900. && ( pNext == rhs.pNext )
  45901. && ( flags == rhs.flags )
  45902. && ( usage == rhs.usage )
  45903. && ( width == rhs.width )
  45904. && ( height == rhs.height )
  45905. && ( layerCount == rhs.layerCount )
  45906. && ( viewFormatCount == rhs.viewFormatCount )
  45907. && ( pViewFormats == rhs.pViewFormats );
  45908. }
  45909. bool operator!=( FramebufferAttachmentImageInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  45910. {
  45911. return !operator==( rhs );
  45912. }
  45913. #endif
  45914. public:
  45915. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentImageInfo;
  45916. const void* pNext = {};
  45917. VULKAN_HPP_NAMESPACE::ImageCreateFlags flags = {};
  45918. VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
  45919. uint32_t width = {};
  45920. uint32_t height = {};
  45921. uint32_t layerCount = {};
  45922. uint32_t viewFormatCount = {};
  45923. const VULKAN_HPP_NAMESPACE::Format* pViewFormats = {};
  45924. };
  45925. static_assert( sizeof( FramebufferAttachmentImageInfo ) == sizeof( VkFramebufferAttachmentImageInfo ), "struct and wrapper have different size!" );
  45926. static_assert( std::is_standard_layout<FramebufferAttachmentImageInfo>::value, "struct wrapper is not a standard layout!" );
  45927. template <>
  45928. struct CppType<StructureType, StructureType::eFramebufferAttachmentImageInfo>
  45929. {
  45930. using Type = FramebufferAttachmentImageInfo;
  45931. };
  45932. using FramebufferAttachmentImageInfoKHR = FramebufferAttachmentImageInfo;
  45933. struct FramebufferAttachmentsCreateInfo
  45934. {
  45935. static const bool allowDuplicate = false;
  45936. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eFramebufferAttachmentsCreateInfo;
  45937. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45938. VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo(uint32_t attachmentImageInfoCount_ = {}, const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos_ = {}) VULKAN_HPP_NOEXCEPT
  45939. : attachmentImageInfoCount( attachmentImageInfoCount_ ), pAttachmentImageInfos( pAttachmentImageInfos_ )
  45940. {}
  45941. VULKAN_HPP_CONSTEXPR FramebufferAttachmentsCreateInfo( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45942. FramebufferAttachmentsCreateInfo( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  45943. : FramebufferAttachmentsCreateInfo( *reinterpret_cast<FramebufferAttachmentsCreateInfo const *>( &rhs ) )
  45944. {}
  45945. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  45946. FramebufferAttachmentsCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const & attachmentImageInfos_ )
  45947. : attachmentImageInfoCount( static_cast<uint32_t>( attachmentImageInfos_.size() ) ), pAttachmentImageInfos( attachmentImageInfos_.data() )
  45948. {}
  45949. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  45950. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  45951. VULKAN_HPP_CONSTEXPR_14 FramebufferAttachmentsCreateInfo & operator=( FramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  45952. FramebufferAttachmentsCreateInfo & operator=( VkFramebufferAttachmentsCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  45953. {
  45954. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo const *>( &rhs );
  45955. return *this;
  45956. }
  45957. FramebufferAttachmentsCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  45958. {
  45959. pNext = pNext_;
  45960. return *this;
  45961. }
  45962. FramebufferAttachmentsCreateInfo & setAttachmentImageInfoCount( uint32_t attachmentImageInfoCount_ ) VULKAN_HPP_NOEXCEPT
  45963. {
  45964. attachmentImageInfoCount = attachmentImageInfoCount_;
  45965. return *this;
  45966. }
  45967. FramebufferAttachmentsCreateInfo & setPAttachmentImageInfos( const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
  45968. {
  45969. pAttachmentImageInfos = pAttachmentImageInfos_;
  45970. return *this;
  45971. }
  45972. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  45973. FramebufferAttachmentsCreateInfo & setAttachmentImageInfos( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo> const & attachmentImageInfos_ ) VULKAN_HPP_NOEXCEPT
  45974. {
  45975. attachmentImageInfoCount = static_cast<uint32_t>( attachmentImageInfos_.size() );
  45976. pAttachmentImageInfos = attachmentImageInfos_.data();
  45977. return *this;
  45978. }
  45979. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  45980. operator VkFramebufferAttachmentsCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  45981. {
  45982. return *reinterpret_cast<const VkFramebufferAttachmentsCreateInfo*>( this );
  45983. }
  45984. operator VkFramebufferAttachmentsCreateInfo &() VULKAN_HPP_NOEXCEPT
  45985. {
  45986. return *reinterpret_cast<VkFramebufferAttachmentsCreateInfo*>( this );
  45987. }
  45988. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  45989. auto operator<=>( FramebufferAttachmentsCreateInfo const& ) const = default;
  45990. #else
  45991. bool operator==( FramebufferAttachmentsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  45992. {
  45993. return ( sType == rhs.sType )
  45994. && ( pNext == rhs.pNext )
  45995. && ( attachmentImageInfoCount == rhs.attachmentImageInfoCount )
  45996. && ( pAttachmentImageInfos == rhs.pAttachmentImageInfos );
  45997. }
  45998. bool operator!=( FramebufferAttachmentsCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  45999. {
  46000. return !operator==( rhs );
  46001. }
  46002. #endif
  46003. public:
  46004. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eFramebufferAttachmentsCreateInfo;
  46005. const void* pNext = {};
  46006. uint32_t attachmentImageInfoCount = {};
  46007. const VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo* pAttachmentImageInfos = {};
  46008. };
  46009. static_assert( sizeof( FramebufferAttachmentsCreateInfo ) == sizeof( VkFramebufferAttachmentsCreateInfo ), "struct and wrapper have different size!" );
  46010. static_assert( std::is_standard_layout<FramebufferAttachmentsCreateInfo>::value, "struct wrapper is not a standard layout!" );
  46011. template <>
  46012. struct CppType<StructureType, StructureType::eFramebufferAttachmentsCreateInfo>
  46013. {
  46014. using Type = FramebufferAttachmentsCreateInfo;
  46015. };
  46016. using FramebufferAttachmentsCreateInfoKHR = FramebufferAttachmentsCreateInfo;
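// Illustrative usage sketch: together with FramebufferAttachmentImageInfo above, this structure
// describes the attachments of an "imageless" framebuffer (Vulkan 1.2 / VK_KHR_imageless_framebuffer);
// the real image views are supplied later via RenderPassAttachmentBeginInfo when the render pass
// begins. `device`, `renderPass`, `colorFormat`, `width` and `height` are assumptions.
//
//   vk::FramebufferAttachmentImageInfo attachmentImageInfo(
//     {}, vk::ImageUsageFlagBits::eColorAttachment, width, height, 1, 1, &colorFormat );
//   vk::FramebufferAttachmentsCreateInfo attachmentsInfo( 1, &attachmentImageInfo );
//   vk::FramebufferCreateInfo framebufferInfo( vk::FramebufferCreateFlagBits::eImageless,
//                                              renderPass, 1, nullptr, width, height, 1 );
//   framebufferInfo.pNext = &attachmentsInfo;
//   vk::Framebuffer framebuffer = device.createFramebuffer( framebufferInfo );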
  46017. struct GraphicsShaderGroupCreateInfoNV
  46018. {
  46019. static const bool allowDuplicate = false;
  46020. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsShaderGroupCreateInfoNV;
  46021. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  46022. VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV(uint32_t stageCount_ = {}, const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ = {}, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {}) VULKAN_HPP_NOEXCEPT
  46023. : stageCount( stageCount_ ), pStages( pStages_ ), pVertexInputState( pVertexInputState_ ), pTessellationState( pTessellationState_ )
  46024. {}
  46025. VULKAN_HPP_CONSTEXPR GraphicsShaderGroupCreateInfoNV( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  46026. GraphicsShaderGroupCreateInfoNV( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  46027. : GraphicsShaderGroupCreateInfoNV( *reinterpret_cast<GraphicsShaderGroupCreateInfoNV const *>( &rhs ) )
  46028. {}
  46029. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  46030. GraphicsShaderGroupCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_, const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ = {}, const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ = {} )
  46031. : stageCount( static_cast<uint32_t>( stages_.size() ) ), pStages( stages_.data() ), pVertexInputState( pVertexInputState_ ), pTessellationState( pTessellationState_ )
  46032. {}
  46033. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  46034. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  46035. VULKAN_HPP_CONSTEXPR_14 GraphicsShaderGroupCreateInfoNV & operator=( GraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  46036. GraphicsShaderGroupCreateInfoNV & operator=( VkGraphicsShaderGroupCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  46037. {
  46038. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV const *>( &rhs );
  46039. return *this;
  46040. }
  46041. GraphicsShaderGroupCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  46042. {
  46043. pNext = pNext_;
  46044. return *this;
  46045. }
  46046. GraphicsShaderGroupCreateInfoNV & setStageCount( uint32_t stageCount_ ) VULKAN_HPP_NOEXCEPT
  46047. {
  46048. stageCount = stageCount_;
  46049. return *this;
  46050. }
  46051. GraphicsShaderGroupCreateInfoNV & setPStages( const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages_ ) VULKAN_HPP_NOEXCEPT
  46052. {
  46053. pStages = pStages_;
  46054. return *this;
  46055. }
  46056. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  46057. GraphicsShaderGroupCreateInfoNV & setStages( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo> const & stages_ ) VULKAN_HPP_NOEXCEPT
  46058. {
  46059. stageCount = static_cast<uint32_t>( stages_.size() );
  46060. pStages = stages_.data();
  46061. return *this;
  46062. }
  46063. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  46064. GraphicsShaderGroupCreateInfoNV & setPVertexInputState( const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState_ ) VULKAN_HPP_NOEXCEPT
  46065. {
  46066. pVertexInputState = pVertexInputState_;
  46067. return *this;
  46068. }
  46069. GraphicsShaderGroupCreateInfoNV & setPTessellationState( const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState_ ) VULKAN_HPP_NOEXCEPT
  46070. {
  46071. pTessellationState = pTessellationState_;
  46072. return *this;
  46073. }
  46074. operator VkGraphicsShaderGroupCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
  46075. {
  46076. return *reinterpret_cast<const VkGraphicsShaderGroupCreateInfoNV*>( this );
  46077. }
  46078. operator VkGraphicsShaderGroupCreateInfoNV &() VULKAN_HPP_NOEXCEPT
  46079. {
  46080. return *reinterpret_cast<VkGraphicsShaderGroupCreateInfoNV*>( this );
  46081. }
  46082. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  46083. auto operator<=>( GraphicsShaderGroupCreateInfoNV const& ) const = default;
  46084. #else
  46085. bool operator==( GraphicsShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  46086. {
  46087. return ( sType == rhs.sType )
  46088. && ( pNext == rhs.pNext )
  46089. && ( stageCount == rhs.stageCount )
  46090. && ( pStages == rhs.pStages )
  46091. && ( pVertexInputState == rhs.pVertexInputState )
  46092. && ( pTessellationState == rhs.pTessellationState );
  46093. }
  46094. bool operator!=( GraphicsShaderGroupCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  46095. {
  46096. return !operator==( rhs );
  46097. }
  46098. #endif
  46099. public:
  46100. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsShaderGroupCreateInfoNV;
  46101. const void* pNext = {};
  46102. uint32_t stageCount = {};
  46103. const VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo* pStages = {};
  46104. const VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo* pVertexInputState = {};
  46105. const VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo* pTessellationState = {};
  46106. };
  46107. static_assert( sizeof( GraphicsShaderGroupCreateInfoNV ) == sizeof( VkGraphicsShaderGroupCreateInfoNV ), "struct and wrapper have different size!" );
  46108. static_assert( std::is_standard_layout<GraphicsShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  46109. template <>
  46110. struct CppType<StructureType, StructureType::eGraphicsShaderGroupCreateInfoNV>
  46111. {
  46112. using Type = GraphicsShaderGroupCreateInfoNV;
  46113. };
  46114. struct GraphicsPipelineShaderGroupsCreateInfoNV
  46115. {
  46116. static const bool allowDuplicate = false;
  46117. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
  46118. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  46119. VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV(uint32_t groupCount_ = {}, const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups_ = {}, uint32_t pipelineCount_ = {}, const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines_ = {}) VULKAN_HPP_NOEXCEPT
  46120. : groupCount( groupCount_ ), pGroups( pGroups_ ), pipelineCount( pipelineCount_ ), pPipelines( pPipelines_ )
  46121. {}
  46122. VULKAN_HPP_CONSTEXPR GraphicsPipelineShaderGroupsCreateInfoNV( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  46123. GraphicsPipelineShaderGroupsCreateInfoNV( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  46124. : GraphicsPipelineShaderGroupsCreateInfoNV( *reinterpret_cast<GraphicsPipelineShaderGroupsCreateInfoNV const *>( &rhs ) )
  46125. {}
  46126. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  46127. GraphicsPipelineShaderGroupsCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const & groups_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ = {} )
  46128. : groupCount( static_cast<uint32_t>( groups_.size() ) ), pGroups( groups_.data() ), pipelineCount( static_cast<uint32_t>( pipelines_.size() ) ), pPipelines( pipelines_.data() )
  46129. {}
  46130. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  46131. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  46132. VULKAN_HPP_CONSTEXPR_14 GraphicsPipelineShaderGroupsCreateInfoNV & operator=( GraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  46133. GraphicsPipelineShaderGroupsCreateInfoNV & operator=( VkGraphicsPipelineShaderGroupsCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  46134. {
  46135. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV const *>( &rhs );
  46136. return *this;
  46137. }
  46138. GraphicsPipelineShaderGroupsCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  46139. {
  46140. pNext = pNext_;
  46141. return *this;
  46142. }
  46143. GraphicsPipelineShaderGroupsCreateInfoNV & setGroupCount( uint32_t groupCount_ ) VULKAN_HPP_NOEXCEPT
  46144. {
  46145. groupCount = groupCount_;
  46146. return *this;
  46147. }
  46148. GraphicsPipelineShaderGroupsCreateInfoNV & setPGroups( const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups_ ) VULKAN_HPP_NOEXCEPT
  46149. {
  46150. pGroups = pGroups_;
  46151. return *this;
  46152. }
  46153. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  46154. GraphicsPipelineShaderGroupsCreateInfoNV & setGroups( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV> const & groups_ ) VULKAN_HPP_NOEXCEPT
  46155. {
  46156. groupCount = static_cast<uint32_t>( groups_.size() );
  46157. pGroups = groups_.data();
  46158. return *this;
  46159. }
  46160. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  46161. GraphicsPipelineShaderGroupsCreateInfoNV & setPipelineCount( uint32_t pipelineCount_ ) VULKAN_HPP_NOEXCEPT
  46162. {
  46163. pipelineCount = pipelineCount_;
  46164. return *this;
  46165. }
  46166. GraphicsPipelineShaderGroupsCreateInfoNV & setPPipelines( const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines_ ) VULKAN_HPP_NOEXCEPT
  46167. {
  46168. pPipelines = pPipelines_;
  46169. return *this;
  46170. }
  46171. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  46172. GraphicsPipelineShaderGroupsCreateInfoNV & setPipelines( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Pipeline> const & pipelines_ ) VULKAN_HPP_NOEXCEPT
  46173. {
  46174. pipelineCount = static_cast<uint32_t>( pipelines_.size() );
  46175. pPipelines = pipelines_.data();
  46176. return *this;
  46177. }
  46178. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  46179. operator VkGraphicsPipelineShaderGroupsCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
  46180. {
  46181. return *reinterpret_cast<const VkGraphicsPipelineShaderGroupsCreateInfoNV*>( this );
  46182. }
  46183. operator VkGraphicsPipelineShaderGroupsCreateInfoNV &() VULKAN_HPP_NOEXCEPT
  46184. {
  46185. return *reinterpret_cast<VkGraphicsPipelineShaderGroupsCreateInfoNV*>( this );
  46186. }
  46187. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  46188. auto operator<=>( GraphicsPipelineShaderGroupsCreateInfoNV const& ) const = default;
  46189. #else
  46190. bool operator==( GraphicsPipelineShaderGroupsCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  46191. {
  46192. return ( sType == rhs.sType )
  46193. && ( pNext == rhs.pNext )
  46194. && ( groupCount == rhs.groupCount )
  46195. && ( pGroups == rhs.pGroups )
  46196. && ( pipelineCount == rhs.pipelineCount )
  46197. && ( pPipelines == rhs.pPipelines );
  46198. }
  46199. bool operator!=( GraphicsPipelineShaderGroupsCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  46200. {
  46201. return !operator==( rhs );
  46202. }
  46203. #endif
  46204. public:
  46205. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV;
  46206. const void* pNext = {};
  46207. uint32_t groupCount = {};
  46208. const VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV* pGroups = {};
  46209. uint32_t pipelineCount = {};
  46210. const VULKAN_HPP_NAMESPACE::Pipeline* pPipelines = {};
  46211. };
  46212. static_assert( sizeof( GraphicsPipelineShaderGroupsCreateInfoNV ) == sizeof( VkGraphicsPipelineShaderGroupsCreateInfoNV ), "struct and wrapper have different size!" );
  46213. static_assert( std::is_standard_layout<GraphicsPipelineShaderGroupsCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  46214. template <>
  46215. struct CppType<StructureType, StructureType::eGraphicsPipelineShaderGroupsCreateInfoNV>
  46216. {
  46217. using Type = GraphicsPipelineShaderGroupsCreateInfoNV;
  46218. };
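// Illustrative usage sketch (heavily abridged): both NV structures above belong to
// VK_NV_device_generated_commands. Chaining a GraphicsPipelineShaderGroupsCreateInfoNV into
// GraphicsPipelineCreateInfo::pNext gives the pipeline several shader groups that device-generated
// commands can select between. `pipelineInfo` and `shaderGroups` (a std::vector of
// vk::GraphicsShaderGroupCreateInfoNV) are assumptions for the example.
//
//   vk::GraphicsPipelineShaderGroupsCreateInfoNV groupsInfo(
//     static_cast<uint32_t>( shaderGroups.size() ), shaderGroups.data() );
//   pipelineInfo.pNext = &groupsInfo;   // pipelineInfo is a vk::GraphicsPipelineCreateInfo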
  46219. struct HeadlessSurfaceCreateInfoEXT
  46220. {
  46221. static const bool allowDuplicate = false;
  46222. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eHeadlessSurfaceCreateInfoEXT;
  46223. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  46224. VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ = {}) VULKAN_HPP_NOEXCEPT
  46225. : flags( flags_ )
  46226. {}
  46227. VULKAN_HPP_CONSTEXPR HeadlessSurfaceCreateInfoEXT( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  46228. HeadlessSurfaceCreateInfoEXT( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  46229. : HeadlessSurfaceCreateInfoEXT( *reinterpret_cast<HeadlessSurfaceCreateInfoEXT const *>( &rhs ) )
  46230. {}
  46231. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  46232. VULKAN_HPP_CONSTEXPR_14 HeadlessSurfaceCreateInfoEXT & operator=( HeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  46233. HeadlessSurfaceCreateInfoEXT & operator=( VkHeadlessSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  46234. {
  46235. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT const *>( &rhs );
  46236. return *this;
  46237. }
  46238. HeadlessSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  46239. {
  46240. pNext = pNext_;
  46241. return *this;
  46242. }
  46243. HeadlessSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
  46244. {
  46245. flags = flags_;
  46246. return *this;
  46247. }
  46248. operator VkHeadlessSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  46249. {
  46250. return *reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT*>( this );
  46251. }
  46252. operator VkHeadlessSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  46253. {
  46254. return *reinterpret_cast<VkHeadlessSurfaceCreateInfoEXT*>( this );
  46255. }
  46256. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  46257. auto operator<=>( HeadlessSurfaceCreateInfoEXT const& ) const = default;
  46258. #else
  46259. bool operator==( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  46260. {
  46261. return ( sType == rhs.sType )
  46262. && ( pNext == rhs.pNext )
  46263. && ( flags == rhs.flags );
  46264. }
  46265. bool operator!=( HeadlessSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  46266. {
  46267. return !operator==( rhs );
  46268. }
  46269. #endif
  46270. public:
  46271. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eHeadlessSurfaceCreateInfoEXT;
  46272. const void* pNext = {};
  46273. VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT flags = {};
  46274. };
  46275. static_assert( sizeof( HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
  46276. static_assert( std::is_standard_layout<HeadlessSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  46277. template <>
  46278. struct CppType<StructureType, StructureType::eHeadlessSurfaceCreateInfoEXT>
  46279. {
  46280. using Type = HeadlessSurfaceCreateInfoEXT;
  46281. };
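// Illustrative usage sketch: VK_EXT_headless_surface creates a surface that is not backed by any
// window system, which is useful for automated testing. Assuming `instance` is a vk::Instance with
// the extension enabled:
//
//   vk::SurfaceKHR surface = instance.createHeadlessSurfaceEXT( vk::HeadlessSurfaceCreateInfoEXT{} );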
  46282. #ifdef VK_USE_PLATFORM_IOS_MVK
  46283. struct IOSSurfaceCreateInfoMVK
  46284. {
  46285. static const bool allowDuplicate = false;
  46286. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eIosSurfaceCreateInfoMVK;
  46287. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  46288. VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK(VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ = {}, const void* pView_ = {}) VULKAN_HPP_NOEXCEPT
  46289. : flags( flags_ ), pView( pView_ )
  46290. {}
  46291. VULKAN_HPP_CONSTEXPR IOSSurfaceCreateInfoMVK( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  46292. IOSSurfaceCreateInfoMVK( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
  46293. : IOSSurfaceCreateInfoMVK( *reinterpret_cast<IOSSurfaceCreateInfoMVK const *>( &rhs ) )
  46294. {}
  46295. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  46296. VULKAN_HPP_CONSTEXPR_14 IOSSurfaceCreateInfoMVK & operator=( IOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  46297. IOSSurfaceCreateInfoMVK & operator=( VkIOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
  46298. {
  46299. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK const *>( &rhs );
  46300. return *this;
  46301. }
  46302. IOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  46303. {
  46304. pNext = pNext_;
  46305. return *this;
  46306. }
  46307. IOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
  46308. {
  46309. flags = flags_;
  46310. return *this;
  46311. }
  46312. IOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) VULKAN_HPP_NOEXCEPT
  46313. {
  46314. pView = pView_;
  46315. return *this;
  46316. }
  46317. operator VkIOSSurfaceCreateInfoMVK const&() const VULKAN_HPP_NOEXCEPT
  46318. {
  46319. return *reinterpret_cast<const VkIOSSurfaceCreateInfoMVK*>( this );
  46320. }
  46321. operator VkIOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
  46322. {
  46323. return *reinterpret_cast<VkIOSSurfaceCreateInfoMVK*>( this );
  46324. }
  46325. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  46326. auto operator<=>( IOSSurfaceCreateInfoMVK const& ) const = default;
  46327. #else
  46328. bool operator==( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
  46329. {
  46330. return ( sType == rhs.sType )
  46331. && ( pNext == rhs.pNext )
  46332. && ( flags == rhs.flags )
  46333. && ( pView == rhs.pView );
  46334. }
  46335. bool operator!=( IOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
  46336. {
  46337. return !operator==( rhs );
  46338. }
  46339. #endif
  46340. public:
  46341. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eIosSurfaceCreateInfoMVK;
  46342. const void* pNext = {};
  46343. VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK flags = {};
  46344. const void* pView = {};
  46345. };
  46346. static_assert( sizeof( IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
  46347. static_assert( std::is_standard_layout<IOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
  46348. template <>
  46349. struct CppType<StructureType, StructureType::eIosSurfaceCreateInfoMVK>
  46350. {
  46351. using Type = IOSSurfaceCreateInfoMVK;
  46352. };
  46353. #endif /*VK_USE_PLATFORM_IOS_MVK*/
  46354. struct ImageDrmFormatModifierExplicitCreateInfoEXT
  46355. {
  46356. static const bool allowDuplicate = false;
  46357. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
  46358. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  46359. VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT(uint64_t drmFormatModifier_ = {}, uint32_t drmFormatModifierPlaneCount_ = {}, const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ = {}) VULKAN_HPP_NOEXCEPT
  46360. : drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( drmFormatModifierPlaneCount_ ), pPlaneLayouts( pPlaneLayouts_ )
  46361. {}
  46362. VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierExplicitCreateInfoEXT( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  46363. ImageDrmFormatModifierExplicitCreateInfoEXT( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  46364. : ImageDrmFormatModifierExplicitCreateInfoEXT( *reinterpret_cast<ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs ) )
  46365. {}
  46366. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  46367. ImageDrmFormatModifierExplicitCreateInfoEXT( uint64_t drmFormatModifier_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const & planeLayouts_ )
  46368. : drmFormatModifier( drmFormatModifier_ ), drmFormatModifierPlaneCount( static_cast<uint32_t>( planeLayouts_.size() ) ), pPlaneLayouts( planeLayouts_.data() )
  46369. {}
  46370. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  46371. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  46372. VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( ImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  46373. ImageDrmFormatModifierExplicitCreateInfoEXT & operator=( VkImageDrmFormatModifierExplicitCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  46374. {
  46375. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT const *>( &rhs );
  46376. return *this;
  46377. }
  46378. ImageDrmFormatModifierExplicitCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  46379. {
  46380. pNext = pNext_;
  46381. return *this;
  46382. }
  46383. ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
  46384. {
  46385. drmFormatModifier = drmFormatModifier_;
  46386. return *this;
  46387. }
  46388. ImageDrmFormatModifierExplicitCreateInfoEXT & setDrmFormatModifierPlaneCount( uint32_t drmFormatModifierPlaneCount_ ) VULKAN_HPP_NOEXCEPT
  46389. {
  46390. drmFormatModifierPlaneCount = drmFormatModifierPlaneCount_;
  46391. return *this;
  46392. }
  46393. ImageDrmFormatModifierExplicitCreateInfoEXT & setPPlaneLayouts( const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts_ ) VULKAN_HPP_NOEXCEPT
  46394. {
  46395. pPlaneLayouts = pPlaneLayouts_;
  46396. return *this;
  46397. }
  46398. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  46399. ImageDrmFormatModifierExplicitCreateInfoEXT & setPlaneLayouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubresourceLayout> const & planeLayouts_ ) VULKAN_HPP_NOEXCEPT
  46400. {
  46401. drmFormatModifierPlaneCount = static_cast<uint32_t>( planeLayouts_.size() );
  46402. pPlaneLayouts = planeLayouts_.data();
  46403. return *this;
  46404. }
  46405. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  46406. operator VkImageDrmFormatModifierExplicitCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  46407. {
  46408. return *reinterpret_cast<const VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
  46409. }
  46410. operator VkImageDrmFormatModifierExplicitCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  46411. {
  46412. return *reinterpret_cast<VkImageDrmFormatModifierExplicitCreateInfoEXT*>( this );
  46413. }
  46414. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  46415. auto operator<=>( ImageDrmFormatModifierExplicitCreateInfoEXT const& ) const = default;
  46416. #else
  46417. bool operator==( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  46418. {
  46419. return ( sType == rhs.sType )
  46420. && ( pNext == rhs.pNext )
  46421. && ( drmFormatModifier == rhs.drmFormatModifier )
  46422. && ( drmFormatModifierPlaneCount == rhs.drmFormatModifierPlaneCount )
  46423. && ( pPlaneLayouts == rhs.pPlaneLayouts );
  46424. }
  46425. bool operator!=( ImageDrmFormatModifierExplicitCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  46426. {
  46427. return !operator==( rhs );
  46428. }
  46429. #endif
  46430. public:
  46431. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT;
  46432. const void* pNext = {};
  46433. uint64_t drmFormatModifier = {};
  46434. uint32_t drmFormatModifierPlaneCount = {};
  46435. const VULKAN_HPP_NAMESPACE::SubresourceLayout* pPlaneLayouts = {};
  46436. };
  46437. static_assert( sizeof( ImageDrmFormatModifierExplicitCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ), "struct and wrapper have different size!" );
  46438. static_assert( std::is_standard_layout<ImageDrmFormatModifierExplicitCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  46439. template <>
  46440. struct CppType<StructureType, StructureType::eImageDrmFormatModifierExplicitCreateInfoEXT>
  46441. {
  46442. using Type = ImageDrmFormatModifierExplicitCreateInfoEXT;
  46443. };
  46444. struct ImageDrmFormatModifierListCreateInfoEXT
  46445. {
  46446. static const bool allowDuplicate = false;
  46447. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
  46448. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  46449. VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT(uint32_t drmFormatModifierCount_ = {}, const uint64_t* pDrmFormatModifiers_ = {}) VULKAN_HPP_NOEXCEPT
  46450. : drmFormatModifierCount( drmFormatModifierCount_ ), pDrmFormatModifiers( pDrmFormatModifiers_ )
  46451. {}
VULKAN_HPP_CONSTEXPR ImageDrmFormatModifierListCreateInfoEXT( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageDrmFormatModifierListCreateInfoEXT( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageDrmFormatModifierListCreateInfoEXT( *reinterpret_cast<ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs ) )
{}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
ImageDrmFormatModifierListCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_ )
: drmFormatModifierCount( static_cast<uint32_t>( drmFormatModifiers_.size() ) ), pDrmFormatModifiers( drmFormatModifiers_.data() )
{}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ImageDrmFormatModifierListCreateInfoEXT & operator=( ImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageDrmFormatModifierListCreateInfoEXT & operator=( VkImageDrmFormatModifierListCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT const *>( &rhs );
return *this;
}
ImageDrmFormatModifierListCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifierCount( uint32_t drmFormatModifierCount_ ) VULKAN_HPP_NOEXCEPT
{
drmFormatModifierCount = drmFormatModifierCount_;
return *this;
}
ImageDrmFormatModifierListCreateInfoEXT & setPDrmFormatModifiers( const uint64_t* pDrmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
{
pDrmFormatModifiers = pDrmFormatModifiers_;
return *this;
}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
ImageDrmFormatModifierListCreateInfoEXT & setDrmFormatModifiers( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & drmFormatModifiers_ ) VULKAN_HPP_NOEXCEPT
{
drmFormatModifierCount = static_cast<uint32_t>( drmFormatModifiers_.size() );
pDrmFormatModifiers = drmFormatModifiers_.data();
return *this;
}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
operator VkImageDrmFormatModifierListCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageDrmFormatModifierListCreateInfoEXT*>( this );
}
operator VkImageDrmFormatModifierListCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageDrmFormatModifierListCreateInfoEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageDrmFormatModifierListCreateInfoEXT const& ) const = default;
#else
bool operator==( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( drmFormatModifierCount == rhs.drmFormatModifierCount )
&& ( pDrmFormatModifiers == rhs.pDrmFormatModifiers );
}
bool operator!=( ImageDrmFormatModifierListCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageDrmFormatModifierListCreateInfoEXT;
const void* pNext = {};
uint32_t drmFormatModifierCount = {};
const uint64_t* pDrmFormatModifiers = {};
};
static_assert( sizeof( ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageDrmFormatModifierListCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eImageDrmFormatModifierListCreateInfoEXT>
{
using Type = ImageDrmFormatModifierListCreateInfoEXT;
};
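// Usage sketch, assuming the default "vk" namespace and an already-populated vk::ImageCreateInfo named
// imageCreateInfo, with the modifier list queried from the device beforehand:
//   std::vector<uint64_t> modifiers = { /* DRM format modifiers supported by the device */ };
//   vk::ImageDrmFormatModifierListCreateInfoEXT drmModifierList( modifiers );
//   imageCreateInfo.tiling = vk::ImageTiling::eDrmFormatModifierEXT;
//   imageCreateInfo.pNext  = &drmModifierList;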
struct ImageFormatListCreateInfo
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageFormatListCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo(uint32_t viewFormatCount_ = {}, const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ = {}) VULKAN_HPP_NOEXCEPT
: viewFormatCount( viewFormatCount_ ), pViewFormats( pViewFormats_ )
{}
VULKAN_HPP_CONSTEXPR ImageFormatListCreateInfo( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageFormatListCreateInfo( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageFormatListCreateInfo( *reinterpret_cast<ImageFormatListCreateInfo const *>( &rhs ) )
{}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
ImageFormatListCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ )
: viewFormatCount( static_cast<uint32_t>( viewFormats_.size() ) ), pViewFormats( viewFormats_.data() )
{}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ImageFormatListCreateInfo & operator=( ImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageFormatListCreateInfo & operator=( VkImageFormatListCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo const *>( &rhs );
return *this;
}
ImageFormatListCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
ImageFormatListCreateInfo & setViewFormatCount( uint32_t viewFormatCount_ ) VULKAN_HPP_NOEXCEPT
{
viewFormatCount = viewFormatCount_;
return *this;
}
ImageFormatListCreateInfo & setPViewFormats( const VULKAN_HPP_NAMESPACE::Format* pViewFormats_ ) VULKAN_HPP_NOEXCEPT
{
pViewFormats = pViewFormats_;
return *this;
}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
ImageFormatListCreateInfo & setViewFormats( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Format> const & viewFormats_ ) VULKAN_HPP_NOEXCEPT
{
viewFormatCount = static_cast<uint32_t>( viewFormats_.size() );
pViewFormats = viewFormats_.data();
return *this;
}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
operator VkImageFormatListCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageFormatListCreateInfo*>( this );
}
operator VkImageFormatListCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageFormatListCreateInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageFormatListCreateInfo const& ) const = default;
#else
bool operator==( ImageFormatListCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( viewFormatCount == rhs.viewFormatCount )
&& ( pViewFormats == rhs.pViewFormats );
}
bool operator!=( ImageFormatListCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageFormatListCreateInfo;
const void* pNext = {};
uint32_t viewFormatCount = {};
const VULKAN_HPP_NAMESPACE::Format* pViewFormats = {};
};
static_assert( sizeof( ImageFormatListCreateInfo ) == sizeof( VkImageFormatListCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageFormatListCreateInfo>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eImageFormatListCreateInfo>
{
using Type = ImageFormatListCreateInfo;
};
using ImageFormatListCreateInfoKHR = ImageFormatListCreateInfo;
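// Usage sketch, assuming the default "vk" namespace and an existing vk::ImageCreateInfo named imageCreateInfo
// created with vk::ImageCreateFlagBits::eMutableFormat:
//   std::array<vk::Format, 2> viewFormats = { vk::Format::eR8G8B8A8Unorm, vk::Format::eR8G8B8A8Srgb };
//   vk::ImageFormatListCreateInfo formatList( viewFormats );
//   imageCreateInfo.pNext = &formatList;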
#ifdef VK_USE_PLATFORM_FUCHSIA
struct ImagePipeSurfaceCreateInfoFUCHSIA
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA(VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ = {}, zx_handle_t imagePipeHandle_ = {}) VULKAN_HPP_NOEXCEPT
: flags( flags_ ), imagePipeHandle( imagePipeHandle_ )
{}
VULKAN_HPP_CONSTEXPR ImagePipeSurfaceCreateInfoFUCHSIA( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImagePipeSurfaceCreateInfoFUCHSIA( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
: ImagePipeSurfaceCreateInfoFUCHSIA( *reinterpret_cast<ImagePipeSurfaceCreateInfoFUCHSIA const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ImagePipeSurfaceCreateInfoFUCHSIA & operator=( ImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImagePipeSurfaceCreateInfoFUCHSIA & operator=( VkImagePipeSurfaceCreateInfoFUCHSIA const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA const *>( &rhs );
return *this;
}
ImagePipeSurfaceCreateInfoFUCHSIA & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
ImagePipeSurfaceCreateInfoFUCHSIA & setFlags( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
ImagePipeSurfaceCreateInfoFUCHSIA & setImagePipeHandle( zx_handle_t imagePipeHandle_ ) VULKAN_HPP_NOEXCEPT
{
imagePipeHandle = imagePipeHandle_;
return *this;
}
operator VkImagePipeSurfaceCreateInfoFUCHSIA const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
}
operator VkImagePipeSurfaceCreateInfoFUCHSIA &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImagePipeSurfaceCreateInfoFUCHSIA*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImagePipeSurfaceCreateInfoFUCHSIA const& ) const = default;
#else
bool operator==( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( memcmp( &imagePipeHandle, &rhs.imagePipeHandle, sizeof( zx_handle_t ) ) == 0 );
}
bool operator!=( ImagePipeSurfaceCreateInfoFUCHSIA const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagepipeSurfaceCreateInfoFUCHSIA;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA flags = {};
zx_handle_t imagePipeHandle = {};
};
static_assert( sizeof( ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImagePipeSurfaceCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eImagepipeSurfaceCreateInfoFUCHSIA>
{
using Type = ImagePipeSurfaceCreateInfoFUCHSIA;
};
#endif /*VK_USE_PLATFORM_FUCHSIA*/
struct ImagePlaneMemoryRequirementsInfo
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImagePlaneMemoryRequirementsInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo(VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor) VULKAN_HPP_NOEXCEPT
: planeAspect( planeAspect_ )
{}
VULKAN_HPP_CONSTEXPR ImagePlaneMemoryRequirementsInfo( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImagePlaneMemoryRequirementsInfo( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ImagePlaneMemoryRequirementsInfo( *reinterpret_cast<ImagePlaneMemoryRequirementsInfo const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ImagePlaneMemoryRequirementsInfo & operator=( ImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImagePlaneMemoryRequirementsInfo & operator=( VkImagePlaneMemoryRequirementsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo const *>( &rhs );
return *this;
}
ImagePlaneMemoryRequirementsInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
ImagePlaneMemoryRequirementsInfo & setPlaneAspect( VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect_ ) VULKAN_HPP_NOEXCEPT
{
planeAspect = planeAspect_;
return *this;
}
operator VkImagePlaneMemoryRequirementsInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImagePlaneMemoryRequirementsInfo*>( this );
}
operator VkImagePlaneMemoryRequirementsInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImagePlaneMemoryRequirementsInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImagePlaneMemoryRequirementsInfo const& ) const = default;
#else
bool operator==( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( planeAspect == rhs.planeAspect );
}
bool operator!=( ImagePlaneMemoryRequirementsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImagePlaneMemoryRequirementsInfo;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::ImageAspectFlagBits planeAspect = VULKAN_HPP_NAMESPACE::ImageAspectFlagBits::eColor;
};
static_assert( sizeof( ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImagePlaneMemoryRequirementsInfo>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eImagePlaneMemoryRequirementsInfo>
{
using Type = ImagePlaneMemoryRequirementsInfo;
};
using ImagePlaneMemoryRequirementsInfoKHR = ImagePlaneMemoryRequirementsInfo;
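// Usage sketch, assuming the default "vk" namespace, a vk::Device named device and a disjoint multi-planar
// vk::Image named image:
//   vk::ImagePlaneMemoryRequirementsInfo planeInfo( vk::ImageAspectFlagBits::ePlane0 );
//   vk::ImageMemoryRequirementsInfo2 requirementsInfo( image );
//   requirementsInfo.pNext = &planeInfo;
//   vk::MemoryRequirements2 requirements = device.getImageMemoryRequirements2( requirementsInfo );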
struct ImageStencilUsageCreateInfo
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageStencilUsageCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo(VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ = {}) VULKAN_HPP_NOEXCEPT
: stencilUsage( stencilUsage_ )
{}
VULKAN_HPP_CONSTEXPR ImageStencilUsageCreateInfo( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageStencilUsageCreateInfo( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageStencilUsageCreateInfo( *reinterpret_cast<ImageStencilUsageCreateInfo const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ImageStencilUsageCreateInfo & operator=( ImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageStencilUsageCreateInfo & operator=( VkImageStencilUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo const *>( &rhs );
return *this;
}
ImageStencilUsageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
ImageStencilUsageCreateInfo & setStencilUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage_ ) VULKAN_HPP_NOEXCEPT
{
stencilUsage = stencilUsage_;
return *this;
}
operator VkImageStencilUsageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageStencilUsageCreateInfo*>( this );
}
operator VkImageStencilUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageStencilUsageCreateInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageStencilUsageCreateInfo const& ) const = default;
#else
bool operator==( ImageStencilUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( stencilUsage == rhs.stencilUsage );
}
bool operator!=( ImageStencilUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageStencilUsageCreateInfo;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::ImageUsageFlags stencilUsage = {};
};
static_assert( sizeof( ImageStencilUsageCreateInfo ) == sizeof( VkImageStencilUsageCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageStencilUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eImageStencilUsageCreateInfo>
{
using Type = ImageStencilUsageCreateInfo;
};
using ImageStencilUsageCreateInfoEXT = ImageStencilUsageCreateInfo;
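// Usage sketch, assuming the default "vk" namespace and an existing vk::ImageCreateInfo named imageCreateInfo
// that uses a combined depth/stencil format:
//   vk::ImageStencilUsageCreateInfo stencilUsage( vk::ImageUsageFlagBits::eDepthStencilAttachment );
//   imageCreateInfo.pNext = &stencilUsage;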
struct ImageSwapchainCreateInfoKHR
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageSwapchainCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR(VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ = {}) VULKAN_HPP_NOEXCEPT
: swapchain( swapchain_ )
{}
VULKAN_HPP_CONSTEXPR ImageSwapchainCreateInfoKHR( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageSwapchainCreateInfoKHR( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageSwapchainCreateInfoKHR( *reinterpret_cast<ImageSwapchainCreateInfoKHR const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ImageSwapchainCreateInfoKHR & operator=( ImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageSwapchainCreateInfoKHR & operator=( VkImageSwapchainCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR const *>( &rhs );
return *this;
}
ImageSwapchainCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
ImageSwapchainCreateInfoKHR & setSwapchain( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain_ ) VULKAN_HPP_NOEXCEPT
{
swapchain = swapchain_;
return *this;
}
operator VkImageSwapchainCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageSwapchainCreateInfoKHR*>( this );
}
operator VkImageSwapchainCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageSwapchainCreateInfoKHR*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageSwapchainCreateInfoKHR const& ) const = default;
#else
bool operator==( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( swapchain == rhs.swapchain );
}
bool operator!=( ImageSwapchainCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageSwapchainCreateInfoKHR;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain = {};
};
static_assert( sizeof( ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eImageSwapchainCreateInfoKHR>
{
using Type = ImageSwapchainCreateInfoKHR;
};
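// Usage sketch, assuming the default "vk" namespace, an existing vk::ImageCreateInfo named imageCreateInfo and a
// vk::SwapchainKHR named swapchain that the image will later be bound to:
//   vk::ImageSwapchainCreateInfoKHR swapchainBinding( swapchain );
//   imageCreateInfo.pNext = &swapchainBinding;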
struct ImageViewASTCDecodeModeEXT
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewAstcDecodeModeEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT(VULKAN_HPP_NAMESPACE::Format decodeMode_ = VULKAN_HPP_NAMESPACE::Format::eUndefined) VULKAN_HPP_NOEXCEPT
: decodeMode( decodeMode_ )
{}
VULKAN_HPP_CONSTEXPR ImageViewASTCDecodeModeEXT( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageViewASTCDecodeModeEXT( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageViewASTCDecodeModeEXT( *reinterpret_cast<ImageViewASTCDecodeModeEXT const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ImageViewASTCDecodeModeEXT & operator=( ImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageViewASTCDecodeModeEXT & operator=( VkImageViewASTCDecodeModeEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT const *>( &rhs );
return *this;
}
ImageViewASTCDecodeModeEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
ImageViewASTCDecodeModeEXT & setDecodeMode( VULKAN_HPP_NAMESPACE::Format decodeMode_ ) VULKAN_HPP_NOEXCEPT
{
decodeMode = decodeMode_;
return *this;
}
operator VkImageViewASTCDecodeModeEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageViewASTCDecodeModeEXT*>( this );
}
operator VkImageViewASTCDecodeModeEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageViewASTCDecodeModeEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageViewASTCDecodeModeEXT const& ) const = default;
#else
bool operator==( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( decodeMode == rhs.decodeMode );
}
bool operator!=( ImageViewASTCDecodeModeEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewAstcDecodeModeEXT;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::Format decodeMode = VULKAN_HPP_NAMESPACE::Format::eUndefined;
};
static_assert( sizeof( ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageViewASTCDecodeModeEXT>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eImageViewAstcDecodeModeEXT>
{
using Type = ImageViewASTCDecodeModeEXT;
};
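// Usage sketch, assuming the default "vk" namespace and an existing vk::ImageViewCreateInfo named
// imageViewCreateInfo for an ASTC-compressed image:
//   vk::ImageViewASTCDecodeModeEXT decodeMode( vk::Format::eR8G8B8A8Unorm );
//   imageViewCreateInfo.pNext = &decodeMode;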
struct ImageViewUsageCreateInfo
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImageViewUsageCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo(VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ = {}) VULKAN_HPP_NOEXCEPT
: usage( usage_ )
{}
VULKAN_HPP_CONSTEXPR ImageViewUsageCreateInfo( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageViewUsageCreateInfo( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: ImageViewUsageCreateInfo( *reinterpret_cast<ImageViewUsageCreateInfo const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ImageViewUsageCreateInfo & operator=( ImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImageViewUsageCreateInfo & operator=( VkImageViewUsageCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo const *>( &rhs );
return *this;
}
ImageViewUsageCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
ImageViewUsageCreateInfo & setUsage( VULKAN_HPP_NAMESPACE::ImageUsageFlags usage_ ) VULKAN_HPP_NOEXCEPT
{
usage = usage_;
return *this;
}
operator VkImageViewUsageCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImageViewUsageCreateInfo*>( this );
}
operator VkImageViewUsageCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImageViewUsageCreateInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImageViewUsageCreateInfo const& ) const = default;
#else
bool operator==( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( usage == rhs.usage );
}
bool operator!=( ImageViewUsageCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImageViewUsageCreateInfo;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::ImageUsageFlags usage = {};
};
static_assert( sizeof( ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImageViewUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eImageViewUsageCreateInfo>
{
using Type = ImageViewUsageCreateInfo;
};
using ImageViewUsageCreateInfoKHR = ImageViewUsageCreateInfo;
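// Usage sketch, assuming the default "vk" namespace and an existing vk::ImageViewCreateInfo named
// imageViewCreateInfo whose view should expose only a subset of the image's usage:
//   vk::ImageViewUsageCreateInfo viewUsage( vk::ImageUsageFlagBits::eSampled );
//   imageViewCreateInfo.pNext = &viewUsage;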
#ifdef VK_USE_PLATFORM_ANDROID_KHR
struct ImportAndroidHardwareBufferInfoANDROID
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID(struct AHardwareBuffer* buffer_ = {}) VULKAN_HPP_NOEXCEPT
: buffer( buffer_ )
{}
VULKAN_HPP_CONSTEXPR ImportAndroidHardwareBufferInfoANDROID( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportAndroidHardwareBufferInfoANDROID( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportAndroidHardwareBufferInfoANDROID( *reinterpret_cast<ImportAndroidHardwareBufferInfoANDROID const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ImportAndroidHardwareBufferInfoANDROID & operator=( ImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportAndroidHardwareBufferInfoANDROID & operator=( VkImportAndroidHardwareBufferInfoANDROID const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID const *>( &rhs );
return *this;
}
ImportAndroidHardwareBufferInfoANDROID & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
ImportAndroidHardwareBufferInfoANDROID & setBuffer( struct AHardwareBuffer* buffer_ ) VULKAN_HPP_NOEXCEPT
{
buffer = buffer_;
return *this;
}
operator VkImportAndroidHardwareBufferInfoANDROID const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportAndroidHardwareBufferInfoANDROID*>( this );
}
operator VkImportAndroidHardwareBufferInfoANDROID &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportAndroidHardwareBufferInfoANDROID*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportAndroidHardwareBufferInfoANDROID const& ) const = default;
#else
bool operator==( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( buffer == rhs.buffer );
}
bool operator!=( ImportAndroidHardwareBufferInfoANDROID const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportAndroidHardwareBufferInfoANDROID;
const void* pNext = {};
struct AHardwareBuffer* buffer = {};
};
static_assert( sizeof( ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImportAndroidHardwareBufferInfoANDROID>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eImportAndroidHardwareBufferInfoANDROID>
{
using Type = ImportAndroidHardwareBufferInfoANDROID;
};
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
struct ImportMemoryFdInfoKHR
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryFdInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, int fd_ = {}) VULKAN_HPP_NOEXCEPT
: handleType( handleType_ ), fd( fd_ )
{}
VULKAN_HPP_CONSTEXPR ImportMemoryFdInfoKHR( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryFdInfoKHR( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMemoryFdInfoKHR( *reinterpret_cast<ImportMemoryFdInfoKHR const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ImportMemoryFdInfoKHR & operator=( ImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryFdInfoKHR & operator=( VkImportMemoryFdInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR const *>( &rhs );
return *this;
}
ImportMemoryFdInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
ImportMemoryFdInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
ImportMemoryFdInfoKHR & setFd( int fd_ ) VULKAN_HPP_NOEXCEPT
{
fd = fd_;
return *this;
}
operator VkImportMemoryFdInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMemoryFdInfoKHR*>( this );
}
operator VkImportMemoryFdInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMemoryFdInfoKHR*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportMemoryFdInfoKHR const& ) const = default;
#else
bool operator==( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleType == rhs.handleType )
&& ( fd == rhs.fd );
}
bool operator!=( ImportMemoryFdInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryFdInfoKHR;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
int fd = {};
};
static_assert( sizeof( ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImportMemoryFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eImportMemoryFdInfoKHR>
{
using Type = ImportMemoryFdInfoKHR;
};
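// Usage sketch, assuming the default "vk" namespace, a vk::Device named device, an fd exported from another
// allocation (e.g. via vk::Device::getMemoryFdKHR), and size/memoryTypeIndex values matching that allocation:
//   vk::ImportMemoryFdInfoKHR importInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, fd );
//   vk::MemoryAllocateInfo allocateInfo( size, memoryTypeIndex );
//   allocateInfo.pNext = &importInfo;
//   vk::DeviceMemory memory = device.allocateMemory( allocateInfo );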
struct ImportMemoryHostPointerInfoEXT
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryHostPointerInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, void* pHostPointer_ = {}) VULKAN_HPP_NOEXCEPT
: handleType( handleType_ ), pHostPointer( pHostPointer_ )
{}
VULKAN_HPP_CONSTEXPR ImportMemoryHostPointerInfoEXT( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryHostPointerInfoEXT( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMemoryHostPointerInfoEXT( *reinterpret_cast<ImportMemoryHostPointerInfoEXT const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ImportMemoryHostPointerInfoEXT & operator=( ImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryHostPointerInfoEXT & operator=( VkImportMemoryHostPointerInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT const *>( &rhs );
return *this;
}
ImportMemoryHostPointerInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
ImportMemoryHostPointerInfoEXT & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
ImportMemoryHostPointerInfoEXT & setPHostPointer( void* pHostPointer_ ) VULKAN_HPP_NOEXCEPT
{
pHostPointer = pHostPointer_;
return *this;
}
operator VkImportMemoryHostPointerInfoEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMemoryHostPointerInfoEXT*>( this );
}
operator VkImportMemoryHostPointerInfoEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMemoryHostPointerInfoEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportMemoryHostPointerInfoEXT const& ) const = default;
#else
bool operator==( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleType == rhs.handleType )
&& ( pHostPointer == rhs.pHostPointer );
}
bool operator!=( ImportMemoryHostPointerInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryHostPointerInfoEXT;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
void* pHostPointer = {};
};
static_assert( sizeof( ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImportMemoryHostPointerInfoEXT>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eImportMemoryHostPointerInfoEXT>
{
using Type = ImportMemoryHostPointerInfoEXT;
};
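// Usage sketch, assuming the default "vk" namespace and a host allocation pointed to by hostPointer that is
// aligned to minImportedHostPointerAlignment, plus size/memoryTypeIndex values determined by the application:
//   vk::ImportMemoryHostPointerInfoEXT hostImport( vk::ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT, hostPointer );
//   vk::MemoryAllocateInfo allocateInfo( size, memoryTypeIndex );
//   allocateInfo.pNext = &hostImport;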
#ifdef VK_USE_PLATFORM_WIN32_KHR
struct ImportMemoryWin32HandleInfoKHR
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd, HANDLE handle_ = {}, LPCWSTR name_ = {}) VULKAN_HPP_NOEXCEPT
: handleType( handleType_ ), handle( handle_ ), name( name_ )
{}
VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoKHR( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryWin32HandleInfoKHR( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMemoryWin32HandleInfoKHR( *reinterpret_cast<ImportMemoryWin32HandleInfoKHR const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoKHR & operator=( ImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryWin32HandleInfoKHR & operator=( VkImportMemoryWin32HandleInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR const *>( &rhs );
return *this;
}
ImportMemoryWin32HandleInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
ImportMemoryWin32HandleInfoKHR & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
ImportMemoryWin32HandleInfoKHR & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
{
handle = handle_;
return *this;
}
ImportMemoryWin32HandleInfoKHR & setName( LPCWSTR name_ ) VULKAN_HPP_NOEXCEPT
{
name = name_;
return *this;
}
operator VkImportMemoryWin32HandleInfoKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMemoryWin32HandleInfoKHR*>( this );
}
operator VkImportMemoryWin32HandleInfoKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMemoryWin32HandleInfoKHR*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportMemoryWin32HandleInfoKHR const& ) const = default;
#else
bool operator==( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleType == rhs.handleType )
&& ( handle == rhs.handle )
&& ( name == rhs.name );
}
bool operator!=( ImportMemoryWin32HandleInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoKHR;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
HANDLE handle = {};
LPCWSTR name = {};
};
static_assert( sizeof( ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eImportMemoryWin32HandleInfoKHR>
{
using Type = ImportMemoryWin32HandleInfoKHR;
};
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_WIN32_KHR
struct ImportMemoryWin32HandleInfoNV
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eImportMemoryWin32HandleInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ = {}, HANDLE handle_ = {}) VULKAN_HPP_NOEXCEPT
: handleType( handleType_ ), handle( handle_ )
{}
VULKAN_HPP_CONSTEXPR ImportMemoryWin32HandleInfoNV( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryWin32HandleInfoNV( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
: ImportMemoryWin32HandleInfoNV( *reinterpret_cast<ImportMemoryWin32HandleInfoNV const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 ImportMemoryWin32HandleInfoNV & operator=( ImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
ImportMemoryWin32HandleInfoNV & operator=( VkImportMemoryWin32HandleInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV const *>( &rhs );
return *this;
}
ImportMemoryWin32HandleInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
ImportMemoryWin32HandleInfoNV & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
ImportMemoryWin32HandleInfoNV & setHandle( HANDLE handle_ ) VULKAN_HPP_NOEXCEPT
{
handle = handle_;
return *this;
}
operator VkImportMemoryWin32HandleInfoNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkImportMemoryWin32HandleInfoNV*>( this );
}
operator VkImportMemoryWin32HandleInfoNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkImportMemoryWin32HandleInfoNV*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( ImportMemoryWin32HandleInfoNV const& ) const = default;
#else
bool operator==( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleType == rhs.handleType )
&& ( handle == rhs.handle );
}
bool operator!=( ImportMemoryWin32HandleInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eImportMemoryWin32HandleInfoNV;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType = {};
HANDLE handle = {};
};
static_assert( sizeof( ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<ImportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eImportMemoryWin32HandleInfoNV>
{
using Type = ImportMemoryWin32HandleInfoNV;
};
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
struct InputAttachmentAspectReference
{
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference(uint32_t subpass_ = {}, uint32_t inputAttachmentIndex_ = {}, VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ = {}) VULKAN_HPP_NOEXCEPT
: subpass( subpass_ ), inputAttachmentIndex( inputAttachmentIndex_ ), aspectMask( aspectMask_ )
{}
VULKAN_HPP_CONSTEXPR InputAttachmentAspectReference( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
InputAttachmentAspectReference( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
: InputAttachmentAspectReference( *reinterpret_cast<InputAttachmentAspectReference const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 InputAttachmentAspectReference & operator=( InputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT = default;
InputAttachmentAspectReference & operator=( VkInputAttachmentAspectReference const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference const *>( &rhs );
return *this;
}
InputAttachmentAspectReference & setSubpass( uint32_t subpass_ ) VULKAN_HPP_NOEXCEPT
{
subpass = subpass_;
return *this;
}
InputAttachmentAspectReference & setInputAttachmentIndex( uint32_t inputAttachmentIndex_ ) VULKAN_HPP_NOEXCEPT
{
inputAttachmentIndex = inputAttachmentIndex_;
return *this;
}
InputAttachmentAspectReference & setAspectMask( VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask_ ) VULKAN_HPP_NOEXCEPT
{
aspectMask = aspectMask_;
return *this;
}
operator VkInputAttachmentAspectReference const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkInputAttachmentAspectReference*>( this );
}
operator VkInputAttachmentAspectReference &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkInputAttachmentAspectReference*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( InputAttachmentAspectReference const& ) const = default;
#else
bool operator==( InputAttachmentAspectReference const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( subpass == rhs.subpass )
&& ( inputAttachmentIndex == rhs.inputAttachmentIndex )
&& ( aspectMask == rhs.aspectMask );
}
bool operator!=( InputAttachmentAspectReference const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
uint32_t subpass = {};
uint32_t inputAttachmentIndex = {};
VULKAN_HPP_NAMESPACE::ImageAspectFlags aspectMask = {};
};
static_assert( sizeof( InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<InputAttachmentAspectReference>::value, "struct wrapper is not a standard layout!" );
using InputAttachmentAspectReferenceKHR = InputAttachmentAspectReference;
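// Usage sketch, assuming the default "vk" namespace and an existing vk::RenderPassCreateInfo named
// renderPassCreateInfo whose subpass 0 reads color input attachment 0:
//   vk::InputAttachmentAspectReference aspectReference( 0, 0, vk::ImageAspectFlagBits::eColor );
//   vk::RenderPassInputAttachmentAspectCreateInfo aspectInfo( 1, &aspectReference );
//   renderPassCreateInfo.pNext = &aspectInfo;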
struct InstanceCreateInfo
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eInstanceCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR InstanceCreateInfo(VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ = {}, const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ = {}, uint32_t enabledLayerCount_ = {}, const char* const * ppEnabledLayerNames_ = {}, uint32_t enabledExtensionCount_ = {}, const char* const * ppEnabledExtensionNames_ = {}) VULKAN_HPP_NOEXCEPT
: flags( flags_ ), pApplicationInfo( pApplicationInfo_ ), enabledLayerCount( enabledLayerCount_ ), ppEnabledLayerNames( ppEnabledLayerNames_ ), enabledExtensionCount( enabledExtensionCount_ ), ppEnabledExtensionNames( ppEnabledExtensionNames_ )
{}
VULKAN_HPP_CONSTEXPR InstanceCreateInfo( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
InstanceCreateInfo( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: InstanceCreateInfo( *reinterpret_cast<InstanceCreateInfo const *>( &rhs ) )
{}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
InstanceCreateInfo( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_, const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledLayerNames_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledExtensionNames_ = {} )
: flags( flags_ ), pApplicationInfo( pApplicationInfo_ ), enabledLayerCount( static_cast<uint32_t>( pEnabledLayerNames_.size() ) ), ppEnabledLayerNames( pEnabledLayerNames_.data() ), enabledExtensionCount( static_cast<uint32_t>( pEnabledExtensionNames_.size() ) ), ppEnabledExtensionNames( pEnabledExtensionNames_.data() )
{}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 InstanceCreateInfo & operator=( InstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
InstanceCreateInfo & operator=( VkInstanceCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::InstanceCreateInfo const *>( &rhs );
return *this;
}
InstanceCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
InstanceCreateInfo & setFlags( VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
InstanceCreateInfo & setPApplicationInfo( const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo_ ) VULKAN_HPP_NOEXCEPT
{
pApplicationInfo = pApplicationInfo_;
return *this;
}
InstanceCreateInfo & setEnabledLayerCount( uint32_t enabledLayerCount_ ) VULKAN_HPP_NOEXCEPT
{
enabledLayerCount = enabledLayerCount_;
return *this;
}
InstanceCreateInfo & setPpEnabledLayerNames( const char* const * ppEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
{
ppEnabledLayerNames = ppEnabledLayerNames_;
return *this;
}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
InstanceCreateInfo & setPEnabledLayerNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledLayerNames_ ) VULKAN_HPP_NOEXCEPT
{
enabledLayerCount = static_cast<uint32_t>( pEnabledLayerNames_.size() );
ppEnabledLayerNames = pEnabledLayerNames_.data();
return *this;
}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
InstanceCreateInfo & setEnabledExtensionCount( uint32_t enabledExtensionCount_ ) VULKAN_HPP_NOEXCEPT
{
enabledExtensionCount = enabledExtensionCount_;
return *this;
}
InstanceCreateInfo & setPpEnabledExtensionNames( const char* const * ppEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
{
ppEnabledExtensionNames = ppEnabledExtensionNames_;
return *this;
}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
InstanceCreateInfo & setPEnabledExtensionNames( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const char* const > const & pEnabledExtensionNames_ ) VULKAN_HPP_NOEXCEPT
{
enabledExtensionCount = static_cast<uint32_t>( pEnabledExtensionNames_.size() );
ppEnabledExtensionNames = pEnabledExtensionNames_.data();
return *this;
}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
operator VkInstanceCreateInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkInstanceCreateInfo*>( this );
}
operator VkInstanceCreateInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkInstanceCreateInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( InstanceCreateInfo const& ) const = default;
#else
bool operator==( InstanceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( pApplicationInfo == rhs.pApplicationInfo )
&& ( enabledLayerCount == rhs.enabledLayerCount )
&& ( ppEnabledLayerNames == rhs.ppEnabledLayerNames )
&& ( enabledExtensionCount == rhs.enabledExtensionCount )
&& ( ppEnabledExtensionNames == rhs.ppEnabledExtensionNames );
}
bool operator!=( InstanceCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eInstanceCreateInfo;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::InstanceCreateFlags flags = {};
const VULKAN_HPP_NAMESPACE::ApplicationInfo* pApplicationInfo = {};
uint32_t enabledLayerCount = {};
const char* const * ppEnabledLayerNames = {};
uint32_t enabledExtensionCount = {};
const char* const * ppEnabledExtensionNames = {};
};
static_assert( sizeof( InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<InstanceCreateInfo>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eInstanceCreateInfo>
{
using Type = InstanceCreateInfo;
};
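// Usage sketch, assuming the default "vk" namespace, exception-based error handling and smart handles enabled:
//   vk::ApplicationInfo applicationInfo( "MyApp", 1, "MyEngine", 1, VK_API_VERSION_1_2 );
//   std::array<const char *, 1> extensions = { VK_KHR_SURFACE_EXTENSION_NAME };
//   vk::InstanceCreateInfo instanceCreateInfo( {}, &applicationInfo, {}, extensions );
//   vk::UniqueInstance instance = vk::createInstanceUnique( instanceCreateInfo );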
#ifdef VK_USE_PLATFORM_MACOS_MVK
struct MacOSSurfaceCreateInfoMVK
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMacosSurfaceCreateInfoMVK;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK(VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ = {}, const void* pView_ = {}) VULKAN_HPP_NOEXCEPT
: flags( flags_ ), pView( pView_ )
{}
VULKAN_HPP_CONSTEXPR MacOSSurfaceCreateInfoMVK( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MacOSSurfaceCreateInfoMVK( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
: MacOSSurfaceCreateInfoMVK( *reinterpret_cast<MacOSSurfaceCreateInfoMVK const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 MacOSSurfaceCreateInfoMVK & operator=( MacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT = default;
MacOSSurfaceCreateInfoMVK & operator=( VkMacOSSurfaceCreateInfoMVK const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK const *>( &rhs );
return *this;
}
MacOSSurfaceCreateInfoMVK & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
MacOSSurfaceCreateInfoMVK & setFlags( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags_ ) VULKAN_HPP_NOEXCEPT
{
flags = flags_;
return *this;
}
MacOSSurfaceCreateInfoMVK & setPView( const void* pView_ ) VULKAN_HPP_NOEXCEPT
{
pView = pView_;
return *this;
}
operator VkMacOSSurfaceCreateInfoMVK const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK*>( this );
}
operator VkMacOSSurfaceCreateInfoMVK &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkMacOSSurfaceCreateInfoMVK*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( MacOSSurfaceCreateInfoMVK const& ) const = default;
#else
bool operator==( MacOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( flags == rhs.flags )
&& ( pView == rhs.pView );
}
bool operator!=( MacOSSurfaceCreateInfoMVK const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMacosSurfaceCreateInfoMVK;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK flags = {};
const void* pView = {};
};
static_assert( sizeof( MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MacOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::eMacosSurfaceCreateInfoMVK>
{
using Type = MacOSSurfaceCreateInfoMVK;
};
#endif /*VK_USE_PLATFORM_MACOS_MVK*/
  47610. struct MemoryAllocateFlagsInfo
  47611. {
  47612. static const bool allowDuplicate = false;
  47613. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryAllocateFlagsInfo;
  47614. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  47615. VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo(VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ = {}, uint32_t deviceMask_ = {}) VULKAN_HPP_NOEXCEPT
  47616. : flags( flags_ ), deviceMask( deviceMask_ )
  47617. {}
  47618. VULKAN_HPP_CONSTEXPR MemoryAllocateFlagsInfo( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  47619. MemoryAllocateFlagsInfo( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  47620. : MemoryAllocateFlagsInfo( *reinterpret_cast<MemoryAllocateFlagsInfo const *>( &rhs ) )
  47621. {}
  47622. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  47623. VULKAN_HPP_CONSTEXPR_14 MemoryAllocateFlagsInfo & operator=( MemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  47624. MemoryAllocateFlagsInfo & operator=( VkMemoryAllocateFlagsInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  47625. {
  47626. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo const *>( &rhs );
  47627. return *this;
  47628. }
  47629. MemoryAllocateFlagsInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  47630. {
  47631. pNext = pNext_;
  47632. return *this;
  47633. }
  47634. MemoryAllocateFlagsInfo & setFlags( VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags_ ) VULKAN_HPP_NOEXCEPT
  47635. {
  47636. flags = flags_;
  47637. return *this;
  47638. }
  47639. MemoryAllocateFlagsInfo & setDeviceMask( uint32_t deviceMask_ ) VULKAN_HPP_NOEXCEPT
  47640. {
  47641. deviceMask = deviceMask_;
  47642. return *this;
  47643. }
  47644. operator VkMemoryAllocateFlagsInfo const&() const VULKAN_HPP_NOEXCEPT
  47645. {
  47646. return *reinterpret_cast<const VkMemoryAllocateFlagsInfo*>( this );
  47647. }
  47648. operator VkMemoryAllocateFlagsInfo &() VULKAN_HPP_NOEXCEPT
  47649. {
  47650. return *reinterpret_cast<VkMemoryAllocateFlagsInfo*>( this );
  47651. }
  47652. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  47653. auto operator<=>( MemoryAllocateFlagsInfo const& ) const = default;
  47654. #else
  47655. bool operator==( MemoryAllocateFlagsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  47656. {
  47657. return ( sType == rhs.sType )
  47658. && ( pNext == rhs.pNext )
  47659. && ( flags == rhs.flags )
  47660. && ( deviceMask == rhs.deviceMask );
  47661. }
  47662. bool operator!=( MemoryAllocateFlagsInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  47663. {
  47664. return !operator==( rhs );
  47665. }
  47666. #endif
  47667. public:
  47668. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryAllocateFlagsInfo;
  47669. const void* pNext = {};
  47670. VULKAN_HPP_NAMESPACE::MemoryAllocateFlags flags = {};
  47671. uint32_t deviceMask = {};
  47672. };
  47673. static_assert( sizeof( MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ), "struct and wrapper have different size!" );
  47674. static_assert( std::is_standard_layout<MemoryAllocateFlagsInfo>::value, "struct wrapper is not a standard layout!" );
  47675. template <>
  47676. struct CppType<StructureType, StructureType::eMemoryAllocateFlagsInfo>
  47677. {
  47678. using Type = MemoryAllocateFlagsInfo;
  47679. };
  47680. using MemoryAllocateFlagsInfoKHR = MemoryAllocateFlagsInfo;
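// Illustrative usage sketch (not part of the generated header): chaining a MemoryAllocateFlagsInfo
// into a MemoryAllocateInfo so the allocation can be used with buffer device addresses.
// "device", "size" and "memoryTypeIndex" are assumed to come from the usual requirements query.
//
//   vk::MemoryAllocateFlagsInfo flagsInfo;
//   flagsInfo.flags = vk::MemoryAllocateFlagBits::eDeviceAddress;
//
//   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex );
//   allocInfo.pNext = &flagsInfo;
//
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );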
  47681. struct MemoryDedicatedAllocateInfo
  47682. {
  47683. static const bool allowDuplicate = false;
  47684. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedAllocateInfo;
  47685. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  47686. VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo(VULKAN_HPP_NAMESPACE::Image image_ = {}, VULKAN_HPP_NAMESPACE::Buffer buffer_ = {}) VULKAN_HPP_NOEXCEPT
  47687. : image( image_ ), buffer( buffer_ )
  47688. {}
  47689. VULKAN_HPP_CONSTEXPR MemoryDedicatedAllocateInfo( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  47690. MemoryDedicatedAllocateInfo( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  47691. : MemoryDedicatedAllocateInfo( *reinterpret_cast<MemoryDedicatedAllocateInfo const *>( &rhs ) )
  47692. {}
  47693. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  47694. VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedAllocateInfo & operator=( MemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  47695. MemoryDedicatedAllocateInfo & operator=( VkMemoryDedicatedAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  47696. {
  47697. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo const *>( &rhs );
  47698. return *this;
  47699. }
  47700. MemoryDedicatedAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  47701. {
  47702. pNext = pNext_;
  47703. return *this;
  47704. }
  47705. MemoryDedicatedAllocateInfo & setImage( VULKAN_HPP_NAMESPACE::Image image_ ) VULKAN_HPP_NOEXCEPT
  47706. {
  47707. image = image_;
  47708. return *this;
  47709. }
  47710. MemoryDedicatedAllocateInfo & setBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer_ ) VULKAN_HPP_NOEXCEPT
  47711. {
  47712. buffer = buffer_;
  47713. return *this;
  47714. }
  47715. operator VkMemoryDedicatedAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
  47716. {
  47717. return *reinterpret_cast<const VkMemoryDedicatedAllocateInfo*>( this );
  47718. }
  47719. operator VkMemoryDedicatedAllocateInfo &() VULKAN_HPP_NOEXCEPT
  47720. {
  47721. return *reinterpret_cast<VkMemoryDedicatedAllocateInfo*>( this );
  47722. }
  47723. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  47724. auto operator<=>( MemoryDedicatedAllocateInfo const& ) const = default;
  47725. #else
  47726. bool operator==( MemoryDedicatedAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  47727. {
  47728. return ( sType == rhs.sType )
  47729. && ( pNext == rhs.pNext )
  47730. && ( image == rhs.image )
  47731. && ( buffer == rhs.buffer );
  47732. }
  47733. bool operator!=( MemoryDedicatedAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  47734. {
  47735. return !operator==( rhs );
  47736. }
  47737. #endif
  47738. public:
  47739. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedAllocateInfo;
  47740. const void* pNext = {};
  47741. VULKAN_HPP_NAMESPACE::Image image = {};
  47742. VULKAN_HPP_NAMESPACE::Buffer buffer = {};
  47743. };
  47744. static_assert( sizeof( MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ), "struct and wrapper have different size!" );
  47745. static_assert( std::is_standard_layout<MemoryDedicatedAllocateInfo>::value, "struct wrapper is not a standard layout!" );
  47746. template <>
  47747. struct CppType<StructureType, StructureType::eMemoryDedicatedAllocateInfo>
  47748. {
  47749. using Type = MemoryDedicatedAllocateInfo;
  47750. };
  47751. using MemoryDedicatedAllocateInfoKHR = MemoryDedicatedAllocateInfo;
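// Illustrative usage sketch (not part of the generated header): requesting a dedicated allocation
// for an image by chaining MemoryDedicatedAllocateInfo into the allocate info. "image", "size"
// and "memoryTypeIndex" are assumed to have been obtained elsewhere.
//
//   vk::MemoryDedicatedAllocateInfo dedicatedInfo;
//   dedicatedInfo.image = image;
//
//   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex );
//   allocInfo.pNext = &dedicatedInfo;
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );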
  47752. struct MemoryDedicatedRequirements
  47753. {
  47754. static const bool allowDuplicate = false;
  47755. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryDedicatedRequirements;
  47756. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  47757. VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements(VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation_ = {}) VULKAN_HPP_NOEXCEPT
  47758. : prefersDedicatedAllocation( prefersDedicatedAllocation_ ), requiresDedicatedAllocation( requiresDedicatedAllocation_ )
  47759. {}
  47760. VULKAN_HPP_CONSTEXPR MemoryDedicatedRequirements( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  47761. MemoryDedicatedRequirements( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
  47762. : MemoryDedicatedRequirements( *reinterpret_cast<MemoryDedicatedRequirements const *>( &rhs ) )
  47763. {}
  47764. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  47765. VULKAN_HPP_CONSTEXPR_14 MemoryDedicatedRequirements & operator=( MemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  47766. MemoryDedicatedRequirements & operator=( VkMemoryDedicatedRequirements const & rhs ) VULKAN_HPP_NOEXCEPT
  47767. {
  47768. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements const *>( &rhs );
  47769. return *this;
  47770. }
  47771. operator VkMemoryDedicatedRequirements const&() const VULKAN_HPP_NOEXCEPT
  47772. {
  47773. return *reinterpret_cast<const VkMemoryDedicatedRequirements*>( this );
  47774. }
  47775. operator VkMemoryDedicatedRequirements &() VULKAN_HPP_NOEXCEPT
  47776. {
  47777. return *reinterpret_cast<VkMemoryDedicatedRequirements*>( this );
  47778. }
  47779. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  47780. auto operator<=>( MemoryDedicatedRequirements const& ) const = default;
  47781. #else
  47782. bool operator==( MemoryDedicatedRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
  47783. {
  47784. return ( sType == rhs.sType )
  47785. && ( pNext == rhs.pNext )
  47786. && ( prefersDedicatedAllocation == rhs.prefersDedicatedAllocation )
  47787. && ( requiresDedicatedAllocation == rhs.requiresDedicatedAllocation );
  47788. }
  47789. bool operator!=( MemoryDedicatedRequirements const& rhs ) const VULKAN_HPP_NOEXCEPT
  47790. {
  47791. return !operator==( rhs );
  47792. }
  47793. #endif
  47794. public:
  47795. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryDedicatedRequirements;
  47796. void* pNext = {};
  47797. VULKAN_HPP_NAMESPACE::Bool32 prefersDedicatedAllocation = {};
  47798. VULKAN_HPP_NAMESPACE::Bool32 requiresDedicatedAllocation = {};
  47799. };
  47800. static_assert( sizeof( MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ), "struct and wrapper have different size!" );
  47801. static_assert( std::is_standard_layout<MemoryDedicatedRequirements>::value, "struct wrapper is not a standard layout!" );
  47802. template <>
  47803. struct CppType<StructureType, StructureType::eMemoryDedicatedRequirements>
  47804. {
  47805. using Type = MemoryDedicatedRequirements;
  47806. };
  47807. using MemoryDedicatedRequirementsKHR = MemoryDedicatedRequirements;
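// Illustrative usage sketch (not part of the generated header): MemoryDedicatedRequirements is a
// returned-only structure, typically pulled out of a StructureChain when querying an image's
// memory requirements. "device" and "image" are assumed to exist already.
//
//   auto chain = device.getImageMemoryRequirements2<vk::MemoryRequirements2,
//                                                   vk::MemoryDedicatedRequirements>(
//     vk::ImageMemoryRequirementsInfo2( image ) );
//   const auto & dedicated = chain.get<vk::MemoryDedicatedRequirements>();
//   bool useDedicated = dedicated.prefersDedicatedAllocation || dedicated.requiresDedicatedAllocation;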
  47808. struct MemoryOpaqueCaptureAddressAllocateInfo
  47809. {
  47810. static const bool allowDuplicate = false;
  47811. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
  47812. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  47813. VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo(uint64_t opaqueCaptureAddress_ = {}) VULKAN_HPP_NOEXCEPT
  47814. : opaqueCaptureAddress( opaqueCaptureAddress_ )
  47815. {}
  47816. VULKAN_HPP_CONSTEXPR MemoryOpaqueCaptureAddressAllocateInfo( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  47817. MemoryOpaqueCaptureAddressAllocateInfo( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  47818. : MemoryOpaqueCaptureAddressAllocateInfo( *reinterpret_cast<MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs ) )
  47819. {}
  47820. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  47821. VULKAN_HPP_CONSTEXPR_14 MemoryOpaqueCaptureAddressAllocateInfo & operator=( MemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  47822. MemoryOpaqueCaptureAddressAllocateInfo & operator=( VkMemoryOpaqueCaptureAddressAllocateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  47823. {
  47824. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo const *>( &rhs );
  47825. return *this;
  47826. }
  47827. MemoryOpaqueCaptureAddressAllocateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  47828. {
  47829. pNext = pNext_;
  47830. return *this;
  47831. }
  47832. MemoryOpaqueCaptureAddressAllocateInfo & setOpaqueCaptureAddress( uint64_t opaqueCaptureAddress_ ) VULKAN_HPP_NOEXCEPT
  47833. {
  47834. opaqueCaptureAddress = opaqueCaptureAddress_;
  47835. return *this;
  47836. }
  47837. operator VkMemoryOpaqueCaptureAddressAllocateInfo const&() const VULKAN_HPP_NOEXCEPT
  47838. {
  47839. return *reinterpret_cast<const VkMemoryOpaqueCaptureAddressAllocateInfo*>( this );
  47840. }
  47841. operator VkMemoryOpaqueCaptureAddressAllocateInfo &() VULKAN_HPP_NOEXCEPT
  47842. {
  47843. return *reinterpret_cast<VkMemoryOpaqueCaptureAddressAllocateInfo*>( this );
  47844. }
  47845. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  47846. auto operator<=>( MemoryOpaqueCaptureAddressAllocateInfo const& ) const = default;
  47847. #else
  47848. bool operator==( MemoryOpaqueCaptureAddressAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  47849. {
  47850. return ( sType == rhs.sType )
  47851. && ( pNext == rhs.pNext )
  47852. && ( opaqueCaptureAddress == rhs.opaqueCaptureAddress );
  47853. }
  47854. bool operator!=( MemoryOpaqueCaptureAddressAllocateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  47855. {
  47856. return !operator==( rhs );
  47857. }
  47858. #endif
  47859. public:
  47860. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryOpaqueCaptureAddressAllocateInfo;
  47861. const void* pNext = {};
  47862. uint64_t opaqueCaptureAddress = {};
  47863. };
  47864. static_assert( sizeof( MemoryOpaqueCaptureAddressAllocateInfo ) == sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ), "struct and wrapper have different size!" );
  47865. static_assert( std::is_standard_layout<MemoryOpaqueCaptureAddressAllocateInfo>::value, "struct wrapper is not a standard layout!" );
  47866. template <>
  47867. struct CppType<StructureType, StructureType::eMemoryOpaqueCaptureAddressAllocateInfo>
  47868. {
  47869. using Type = MemoryOpaqueCaptureAddressAllocateInfo;
  47870. };
  47871. using MemoryOpaqueCaptureAddressAllocateInfoKHR = MemoryOpaqueCaptureAddressAllocateInfo;
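// Illustrative usage sketch (not part of the generated header): replaying an allocation at an
// opaque address recorded during a capture run. "capturedAddress", "size" and "memoryTypeIndex"
// are assumed saved values; capture/replay also requires the matching allocate flags.
//
//   vk::MemoryOpaqueCaptureAddressAllocateInfo captureInfo;
//   captureInfo.opaqueCaptureAddress = capturedAddress;
//
//   vk::MemoryAllocateFlagsInfo flagsInfo;
//   flagsInfo.flags = vk::MemoryAllocateFlagBits::eDeviceAddress |
//                     vk::MemoryAllocateFlagBits::eDeviceAddressCaptureReplay;
//   flagsInfo.pNext = &captureInfo;
//
//   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex );
//   allocInfo.pNext = &flagsInfo;
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );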
  47872. struct MemoryPriorityAllocateInfoEXT
  47873. {
  47874. static const bool allowDuplicate = false;
  47875. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMemoryPriorityAllocateInfoEXT;
  47876. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  47877. VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT(float priority_ = {}) VULKAN_HPP_NOEXCEPT
  47878. : priority( priority_ )
  47879. {}
  47880. VULKAN_HPP_CONSTEXPR MemoryPriorityAllocateInfoEXT( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  47881. MemoryPriorityAllocateInfoEXT( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  47882. : MemoryPriorityAllocateInfoEXT( *reinterpret_cast<MemoryPriorityAllocateInfoEXT const *>( &rhs ) )
  47883. {}
  47884. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  47885. VULKAN_HPP_CONSTEXPR_14 MemoryPriorityAllocateInfoEXT & operator=( MemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  47886. MemoryPriorityAllocateInfoEXT & operator=( VkMemoryPriorityAllocateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  47887. {
  47888. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT const *>( &rhs );
  47889. return *this;
  47890. }
  47891. MemoryPriorityAllocateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  47892. {
  47893. pNext = pNext_;
  47894. return *this;
  47895. }
  47896. MemoryPriorityAllocateInfoEXT & setPriority( float priority_ ) VULKAN_HPP_NOEXCEPT
  47897. {
  47898. priority = priority_;
  47899. return *this;
  47900. }
  47901. operator VkMemoryPriorityAllocateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  47902. {
  47903. return *reinterpret_cast<const VkMemoryPriorityAllocateInfoEXT*>( this );
  47904. }
  47905. operator VkMemoryPriorityAllocateInfoEXT &() VULKAN_HPP_NOEXCEPT
  47906. {
  47907. return *reinterpret_cast<VkMemoryPriorityAllocateInfoEXT*>( this );
  47908. }
  47909. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  47910. auto operator<=>( MemoryPriorityAllocateInfoEXT const& ) const = default;
  47911. #else
  47912. bool operator==( MemoryPriorityAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  47913. {
  47914. return ( sType == rhs.sType )
  47915. && ( pNext == rhs.pNext )
  47916. && ( priority == rhs.priority );
  47917. }
  47918. bool operator!=( MemoryPriorityAllocateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  47919. {
  47920. return !operator==( rhs );
  47921. }
  47922. #endif
  47923. public:
  47924. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMemoryPriorityAllocateInfoEXT;
  47925. const void* pNext = {};
  47926. float priority = {};
  47927. };
  47928. static_assert( sizeof( MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ), "struct and wrapper have different size!" );
  47929. static_assert( std::is_standard_layout<MemoryPriorityAllocateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  47930. template <>
  47931. struct CppType<StructureType, StructureType::eMemoryPriorityAllocateInfoEXT>
  47932. {
  47933. using Type = MemoryPriorityAllocateInfoEXT;
  47934. };
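// Illustrative usage sketch (not part of the generated header): attaching a priority to an
// allocation when VK_EXT_memory_priority is enabled. Priorities range from 0.0 (lowest) to
// 1.0 (highest); 0.5 is the implicit default. "size" and "memoryTypeIndex" are placeholders.
//
//   vk::MemoryPriorityAllocateInfoEXT priorityInfo;
//   priorityInfo.priority = 1.0f;   // prefer to keep this allocation resident
//
//   vk::MemoryAllocateInfo allocInfo( size, memoryTypeIndex );
//   allocInfo.pNext = &priorityInfo;
//   vk::DeviceMemory memory = device.allocateMemory( allocInfo );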
  47935. #ifdef VK_USE_PLATFORM_METAL_EXT
  47936. struct MetalSurfaceCreateInfoEXT
  47937. {
  47938. static const bool allowDuplicate = false;
  47939. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMetalSurfaceCreateInfoEXT;
  47940. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  47941. VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT(VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ = {}, const CAMetalLayer* pLayer_ = {}) VULKAN_HPP_NOEXCEPT
  47942. : flags( flags_ ), pLayer( pLayer_ )
  47943. {}
  47944. VULKAN_HPP_CONSTEXPR MetalSurfaceCreateInfoEXT( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  47945. MetalSurfaceCreateInfoEXT( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  47946. : MetalSurfaceCreateInfoEXT( *reinterpret_cast<MetalSurfaceCreateInfoEXT const *>( &rhs ) )
  47947. {}
  47948. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  47949. VULKAN_HPP_CONSTEXPR_14 MetalSurfaceCreateInfoEXT & operator=( MetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  47950. MetalSurfaceCreateInfoEXT & operator=( VkMetalSurfaceCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  47951. {
  47952. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT const *>( &rhs );
  47953. return *this;
  47954. }
  47955. MetalSurfaceCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  47956. {
  47957. pNext = pNext_;
  47958. return *this;
  47959. }
  47960. MetalSurfaceCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
  47961. {
  47962. flags = flags_;
  47963. return *this;
  47964. }
  47965. MetalSurfaceCreateInfoEXT & setPLayer( const CAMetalLayer* pLayer_ ) VULKAN_HPP_NOEXCEPT
  47966. {
  47967. pLayer = pLayer_;
  47968. return *this;
  47969. }
  47970. operator VkMetalSurfaceCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  47971. {
  47972. return *reinterpret_cast<const VkMetalSurfaceCreateInfoEXT*>( this );
  47973. }
  47974. operator VkMetalSurfaceCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  47975. {
  47976. return *reinterpret_cast<VkMetalSurfaceCreateInfoEXT*>( this );
  47977. }
  47978. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  47979. auto operator<=>( MetalSurfaceCreateInfoEXT const& ) const = default;
  47980. #else
  47981. bool operator==( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  47982. {
  47983. return ( sType == rhs.sType )
  47984. && ( pNext == rhs.pNext )
  47985. && ( flags == rhs.flags )
  47986. && ( pLayer == rhs.pLayer );
  47987. }
  47988. bool operator!=( MetalSurfaceCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  47989. {
  47990. return !operator==( rhs );
  47991. }
  47992. #endif
  47993. public:
  47994. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMetalSurfaceCreateInfoEXT;
  47995. const void* pNext = {};
  47996. VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT flags = {};
  47997. const CAMetalLayer* pLayer = {};
  47998. };
  47999. static_assert( sizeof( MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ), "struct and wrapper have different size!" );
  48000. static_assert( std::is_standard_layout<MetalSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  48001. template <>
  48002. struct CppType<StructureType, StructureType::eMetalSurfaceCreateInfoEXT>
  48003. {
  48004. using Type = MetalSurfaceCreateInfoEXT;
  48005. };
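// Illustrative usage sketch (not part of the generated header): creating a Metal surface from an
// existing CAMetalLayer. "layer" and "instance" are assumed to have been set up elsewhere.
//
//   const CAMetalLayer * layer = /* layer backing the window */;
//
//   vk::MetalSurfaceCreateInfoEXT surfaceInfo;
//   surfaceInfo.pLayer = layer;
//
//   vk::SurfaceKHR surface = instance.createMetalSurfaceEXT( surfaceInfo );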
  48006. #endif /*VK_USE_PLATFORM_METAL_EXT*/
  48007. struct MutableDescriptorTypeListVALVE
  48008. {
  48009. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48010. VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListVALVE(uint32_t descriptorTypeCount_ = {}, const VULKAN_HPP_NAMESPACE::DescriptorType* pDescriptorTypes_ = {}) VULKAN_HPP_NOEXCEPT
  48011. : descriptorTypeCount( descriptorTypeCount_ ), pDescriptorTypes( pDescriptorTypes_ )
  48012. {}
  48013. VULKAN_HPP_CONSTEXPR MutableDescriptorTypeListVALVE( MutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48014. MutableDescriptorTypeListVALVE( VkMutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
  48015. : MutableDescriptorTypeListVALVE( *reinterpret_cast<MutableDescriptorTypeListVALVE const *>( &rhs ) )
  48016. {}
  48017. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  48018. MutableDescriptorTypeListVALVE( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorType> const & descriptorTypes_ )
  48019. : descriptorTypeCount( static_cast<uint32_t>( descriptorTypes_.size() ) ), pDescriptorTypes( descriptorTypes_.data() )
  48020. {}
  48021. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  48022. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48023. VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeListVALVE & operator=( MutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48024. MutableDescriptorTypeListVALVE & operator=( VkMutableDescriptorTypeListVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
  48025. {
  48026. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE const *>( &rhs );
  48027. return *this;
  48028. }
  48029. MutableDescriptorTypeListVALVE & setDescriptorTypeCount( uint32_t descriptorTypeCount_ ) VULKAN_HPP_NOEXCEPT
  48030. {
  48031. descriptorTypeCount = descriptorTypeCount_;
  48032. return *this;
  48033. }
  48034. MutableDescriptorTypeListVALVE & setPDescriptorTypes( const VULKAN_HPP_NAMESPACE::DescriptorType* pDescriptorTypes_ ) VULKAN_HPP_NOEXCEPT
  48035. {
  48036. pDescriptorTypes = pDescriptorTypes_;
  48037. return *this;
  48038. }
  48039. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  48040. MutableDescriptorTypeListVALVE & setDescriptorTypes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DescriptorType> const & descriptorTypes_ ) VULKAN_HPP_NOEXCEPT
  48041. {
  48042. descriptorTypeCount = static_cast<uint32_t>( descriptorTypes_.size() );
  48043. pDescriptorTypes = descriptorTypes_.data();
  48044. return *this;
  48045. }
  48046. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  48047. operator VkMutableDescriptorTypeListVALVE const&() const VULKAN_HPP_NOEXCEPT
  48048. {
  48049. return *reinterpret_cast<const VkMutableDescriptorTypeListVALVE*>( this );
  48050. }
  48051. operator VkMutableDescriptorTypeListVALVE &() VULKAN_HPP_NOEXCEPT
  48052. {
  48053. return *reinterpret_cast<VkMutableDescriptorTypeListVALVE*>( this );
  48054. }
  48055. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  48056. auto operator<=>( MutableDescriptorTypeListVALVE const& ) const = default;
  48057. #else
  48058. bool operator==( MutableDescriptorTypeListVALVE const& rhs ) const VULKAN_HPP_NOEXCEPT
  48059. {
  48060. return ( descriptorTypeCount == rhs.descriptorTypeCount )
  48061. && ( pDescriptorTypes == rhs.pDescriptorTypes );
  48062. }
  48063. bool operator!=( MutableDescriptorTypeListVALVE const& rhs ) const VULKAN_HPP_NOEXCEPT
  48064. {
  48065. return !operator==( rhs );
  48066. }
  48067. #endif
  48068. public:
  48069. uint32_t descriptorTypeCount = {};
  48070. const VULKAN_HPP_NAMESPACE::DescriptorType* pDescriptorTypes = {};
  48071. };
  48072. static_assert( sizeof( MutableDescriptorTypeListVALVE ) == sizeof( VkMutableDescriptorTypeListVALVE ), "struct and wrapper have different size!" );
  48073. static_assert( std::is_standard_layout<MutableDescriptorTypeListVALVE>::value, "struct wrapper is not a standard layout!" );
  48074. struct MutableDescriptorTypeCreateInfoVALVE
  48075. {
  48076. static const bool allowDuplicate = false;
  48077. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eMutableDescriptorTypeCreateInfoVALVE;
  48078. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48079. VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoVALVE(uint32_t mutableDescriptorTypeListCount_ = {}, const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE* pMutableDescriptorTypeLists_ = {}) VULKAN_HPP_NOEXCEPT
  48080. : mutableDescriptorTypeListCount( mutableDescriptorTypeListCount_ ), pMutableDescriptorTypeLists( pMutableDescriptorTypeLists_ )
  48081. {}
  48082. VULKAN_HPP_CONSTEXPR MutableDescriptorTypeCreateInfoVALVE( MutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48083. MutableDescriptorTypeCreateInfoVALVE( VkMutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
  48084. : MutableDescriptorTypeCreateInfoVALVE( *reinterpret_cast<MutableDescriptorTypeCreateInfoVALVE const *>( &rhs ) )
  48085. {}
  48086. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  48087. MutableDescriptorTypeCreateInfoVALVE( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE> const & mutableDescriptorTypeLists_ )
  48088. : mutableDescriptorTypeListCount( static_cast<uint32_t>( mutableDescriptorTypeLists_.size() ) ), pMutableDescriptorTypeLists( mutableDescriptorTypeLists_.data() )
  48089. {}
  48090. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  48091. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48092. VULKAN_HPP_CONSTEXPR_14 MutableDescriptorTypeCreateInfoVALVE & operator=( MutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48093. MutableDescriptorTypeCreateInfoVALVE & operator=( VkMutableDescriptorTypeCreateInfoVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
  48094. {
  48095. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE const *>( &rhs );
  48096. return *this;
  48097. }
  48098. MutableDescriptorTypeCreateInfoVALVE & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  48099. {
  48100. pNext = pNext_;
  48101. return *this;
  48102. }
  48103. MutableDescriptorTypeCreateInfoVALVE & setMutableDescriptorTypeListCount( uint32_t mutableDescriptorTypeListCount_ ) VULKAN_HPP_NOEXCEPT
  48104. {
  48105. mutableDescriptorTypeListCount = mutableDescriptorTypeListCount_;
  48106. return *this;
  48107. }
  48108. MutableDescriptorTypeCreateInfoVALVE & setPMutableDescriptorTypeLists( const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE* pMutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT
  48109. {
  48110. pMutableDescriptorTypeLists = pMutableDescriptorTypeLists_;
  48111. return *this;
  48112. }
  48113. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  48114. MutableDescriptorTypeCreateInfoVALVE & setMutableDescriptorTypeLists( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE> const & mutableDescriptorTypeLists_ ) VULKAN_HPP_NOEXCEPT
  48115. {
  48116. mutableDescriptorTypeListCount = static_cast<uint32_t>( mutableDescriptorTypeLists_.size() );
  48117. pMutableDescriptorTypeLists = mutableDescriptorTypeLists_.data();
  48118. return *this;
  48119. }
  48120. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  48121. operator VkMutableDescriptorTypeCreateInfoVALVE const&() const VULKAN_HPP_NOEXCEPT
  48122. {
  48123. return *reinterpret_cast<const VkMutableDescriptorTypeCreateInfoVALVE*>( this );
  48124. }
  48125. operator VkMutableDescriptorTypeCreateInfoVALVE &() VULKAN_HPP_NOEXCEPT
  48126. {
  48127. return *reinterpret_cast<VkMutableDescriptorTypeCreateInfoVALVE*>( this );
  48128. }
  48129. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  48130. auto operator<=>( MutableDescriptorTypeCreateInfoVALVE const& ) const = default;
  48131. #else
  48132. bool operator==( MutableDescriptorTypeCreateInfoVALVE const& rhs ) const VULKAN_HPP_NOEXCEPT
  48133. {
  48134. return ( sType == rhs.sType )
  48135. && ( pNext == rhs.pNext )
  48136. && ( mutableDescriptorTypeListCount == rhs.mutableDescriptorTypeListCount )
  48137. && ( pMutableDescriptorTypeLists == rhs.pMutableDescriptorTypeLists );
  48138. }
  48139. bool operator!=( MutableDescriptorTypeCreateInfoVALVE const& rhs ) const VULKAN_HPP_NOEXCEPT
  48140. {
  48141. return !operator==( rhs );
  48142. }
  48143. #endif
  48144. public:
  48145. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eMutableDescriptorTypeCreateInfoVALVE;
  48146. const void* pNext = {};
  48147. uint32_t mutableDescriptorTypeListCount = {};
  48148. const VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE* pMutableDescriptorTypeLists = {};
  48149. };
  48150. static_assert( sizeof( MutableDescriptorTypeCreateInfoVALVE ) == sizeof( VkMutableDescriptorTypeCreateInfoVALVE ), "struct and wrapper have different size!" );
  48151. static_assert( std::is_standard_layout<MutableDescriptorTypeCreateInfoVALVE>::value, "struct wrapper is not a standard layout!" );
  48152. template <>
  48153. struct CppType<StructureType, StructureType::eMutableDescriptorTypeCreateInfoVALVE>
  48154. {
  48155. using Type = MutableDescriptorTypeCreateInfoVALVE;
  48156. };
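// Illustrative usage sketch (not part of the generated header): declaring that a mutable binding
// (descriptorType eMutableVALVE, set up elsewhere) may hold either a sampled or a storage image.
// One MutableDescriptorTypeListVALVE entry corresponds to one binding of the layout.
//
//   std::array<vk::DescriptorType, 2> types = { vk::DescriptorType::eSampledImage,
//                                               vk::DescriptorType::eStorageImage };
//   vk::MutableDescriptorTypeListVALVE typeList( types );          // ArrayProxy constructor
//
//   vk::MutableDescriptorTypeCreateInfoVALVE mutableInfo;
//   mutableInfo.setMutableDescriptorTypeLists( typeList );         // one list for binding 0
//
//   vk::DescriptorSetLayoutCreateInfo layoutInfo;
//   layoutInfo.pNext = &mutableInfo;   // bindings themselves are filled in elsewhere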
  48157. union PerformanceCounterResultKHR
  48158. {
  48159. PerformanceCounterResultKHR( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const& rhs ) VULKAN_HPP_NOEXCEPT
  48160. {
  48161. memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) );
  48162. }
  48163. PerformanceCounterResultKHR( int32_t int32_ = {} )
  48164. : int32( int32_ )
  48165. {}
  48166. PerformanceCounterResultKHR( int64_t int64_ )
  48167. : int64( int64_ )
  48168. {}
  48169. PerformanceCounterResultKHR( uint32_t uint32_ )
  48170. : uint32( uint32_ )
  48171. {}
  48172. PerformanceCounterResultKHR( uint64_t uint64_ )
  48173. : uint64( uint64_ )
  48174. {}
  48175. PerformanceCounterResultKHR( float float32_ )
  48176. : float32( float32_ )
  48177. {}
  48178. PerformanceCounterResultKHR( double float64_ )
  48179. : float64( float64_ )
  48180. {}
  48181. PerformanceCounterResultKHR & setInt32( int32_t int32_ ) VULKAN_HPP_NOEXCEPT
  48182. {
  48183. int32 = int32_;
  48184. return *this;
  48185. }
  48186. PerformanceCounterResultKHR & setInt64( int64_t int64_ ) VULKAN_HPP_NOEXCEPT
  48187. {
  48188. int64 = int64_;
  48189. return *this;
  48190. }
  48191. PerformanceCounterResultKHR & setUint32( uint32_t uint32_ ) VULKAN_HPP_NOEXCEPT
  48192. {
  48193. uint32 = uint32_;
  48194. return *this;
  48195. }
  48196. PerformanceCounterResultKHR & setUint64( uint64_t uint64_ ) VULKAN_HPP_NOEXCEPT
  48197. {
  48198. uint64 = uint64_;
  48199. return *this;
  48200. }
  48201. PerformanceCounterResultKHR & setFloat32( float float32_ ) VULKAN_HPP_NOEXCEPT
  48202. {
  48203. float32 = float32_;
  48204. return *this;
  48205. }
  48206. PerformanceCounterResultKHR & setFloat64( double float64_ ) VULKAN_HPP_NOEXCEPT
  48207. {
  48208. float64 = float64_;
  48209. return *this;
  48210. }
  48211. VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR & operator=( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  48212. {
  48213. memcpy( static_cast<void*>(this), &rhs, sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) );
  48214. return *this;
  48215. }
  48216. operator VkPerformanceCounterResultKHR const&() const
  48217. {
  48218. return *reinterpret_cast<const VkPerformanceCounterResultKHR*>(this);
  48219. }
  48220. operator VkPerformanceCounterResultKHR &()
  48221. {
  48222. return *reinterpret_cast<VkPerformanceCounterResultKHR*>(this);
  48223. }
  48224. int32_t int32;
  48225. int64_t int64;
  48226. uint32_t uint32;
  48227. uint64_t uint64;
  48228. float float32;
  48229. double float64;
  48230. };
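// Illustrative usage sketch (not part of the generated header): which member of
// PerformanceCounterResultKHR is valid is determined by the PerformanceCounterKHR::storage value
// reported for that counter. "results", "counters" and "use" are assumptions for the example.
//
//   vk::PerformanceCounterResultKHR result = results[i];
//   switch ( counters[i].storage )
//   {
//     case vk::PerformanceCounterStorageKHR::eUint64:  use( result.uint64 );  break;
//     case vk::PerformanceCounterStorageKHR::eFloat64: use( result.float64 ); break;
//     // ... the remaining storage kinds follow the same pattern
//     default: break;
//   }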
  48231. struct PerformanceQuerySubmitInfoKHR
  48232. {
  48233. static const bool allowDuplicate = false;
  48234. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePerformanceQuerySubmitInfoKHR;
  48235. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48236. VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR(uint32_t counterPassIndex_ = {}) VULKAN_HPP_NOEXCEPT
  48237. : counterPassIndex( counterPassIndex_ )
  48238. {}
  48239. VULKAN_HPP_CONSTEXPR PerformanceQuerySubmitInfoKHR( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48240. PerformanceQuerySubmitInfoKHR( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  48241. : PerformanceQuerySubmitInfoKHR( *reinterpret_cast<PerformanceQuerySubmitInfoKHR const *>( &rhs ) )
  48242. {}
  48243. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48244. VULKAN_HPP_CONSTEXPR_14 PerformanceQuerySubmitInfoKHR & operator=( PerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48245. PerformanceQuerySubmitInfoKHR & operator=( VkPerformanceQuerySubmitInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  48246. {
  48247. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR const *>( &rhs );
  48248. return *this;
  48249. }
  48250. PerformanceQuerySubmitInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  48251. {
  48252. pNext = pNext_;
  48253. return *this;
  48254. }
  48255. PerformanceQuerySubmitInfoKHR & setCounterPassIndex( uint32_t counterPassIndex_ ) VULKAN_HPP_NOEXCEPT
  48256. {
  48257. counterPassIndex = counterPassIndex_;
  48258. return *this;
  48259. }
  48260. operator VkPerformanceQuerySubmitInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  48261. {
  48262. return *reinterpret_cast<const VkPerformanceQuerySubmitInfoKHR*>( this );
  48263. }
  48264. operator VkPerformanceQuerySubmitInfoKHR &() VULKAN_HPP_NOEXCEPT
  48265. {
  48266. return *reinterpret_cast<VkPerformanceQuerySubmitInfoKHR*>( this );
  48267. }
  48268. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  48269. auto operator<=>( PerformanceQuerySubmitInfoKHR const& ) const = default;
  48270. #else
  48271. bool operator==( PerformanceQuerySubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  48272. {
  48273. return ( sType == rhs.sType )
  48274. && ( pNext == rhs.pNext )
  48275. && ( counterPassIndex == rhs.counterPassIndex );
  48276. }
  48277. bool operator!=( PerformanceQuerySubmitInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  48278. {
  48279. return !operator==( rhs );
  48280. }
  48281. #endif
  48282. public:
  48283. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePerformanceQuerySubmitInfoKHR;
  48284. const void* pNext = {};
  48285. uint32_t counterPassIndex = {};
  48286. };
  48287. static_assert( sizeof( PerformanceQuerySubmitInfoKHR ) == sizeof( VkPerformanceQuerySubmitInfoKHR ), "struct and wrapper have different size!" );
  48288. static_assert( std::is_standard_layout<PerformanceQuerySubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
  48289. template <>
  48290. struct CppType<StructureType, StructureType::ePerformanceQuerySubmitInfoKHR>
  48291. {
  48292. using Type = PerformanceQuerySubmitInfoKHR;
  48293. };
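// Illustrative usage sketch (not part of the generated header): a performance query may need
// several submissions ("counter passes"); the pass index is supplied through the pNext chain of
// each SubmitInfo. "numPasses" would come from getQueueFamilyPerformanceQueryPassesKHR, and
// "commandBuffer", "queue" and "fence" are assumed to exist.
//
//   for ( uint32_t pass = 0; pass < numPasses; ++pass )
//   {
//     vk::PerformanceQuerySubmitInfoKHR passInfo( pass );
//     vk::SubmitInfo submitInfo;
//     submitInfo.pNext              = &passInfo;
//     submitInfo.commandBufferCount = 1;
//     submitInfo.pCommandBuffers    = &commandBuffer;
//     queue.submit( submitInfo, fence );
//   }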
  48294. struct PhysicalDevice16BitStorageFeatures
  48295. {
  48296. static const bool allowDuplicate = false;
  48297. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice16BitStorageFeatures;
  48298. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48299. VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}) VULKAN_HPP_NOEXCEPT
  48300. : storageBuffer16BitAccess( storageBuffer16BitAccess_ ), uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ), storagePushConstant16( storagePushConstant16_ ), storageInputOutput16( storageInputOutput16_ )
  48301. {}
  48302. VULKAN_HPP_CONSTEXPR PhysicalDevice16BitStorageFeatures( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48303. PhysicalDevice16BitStorageFeatures( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  48304. : PhysicalDevice16BitStorageFeatures( *reinterpret_cast<PhysicalDevice16BitStorageFeatures const *>( &rhs ) )
  48305. {}
  48306. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48307. VULKAN_HPP_CONSTEXPR_14 PhysicalDevice16BitStorageFeatures & operator=( PhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48308. PhysicalDevice16BitStorageFeatures & operator=( VkPhysicalDevice16BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  48309. {
  48310. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures const *>( &rhs );
  48311. return *this;
  48312. }
  48313. PhysicalDevice16BitStorageFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  48314. {
  48315. pNext = pNext_;
  48316. return *this;
  48317. }
  48318. PhysicalDevice16BitStorageFeatures & setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
  48319. {
  48320. storageBuffer16BitAccess = storageBuffer16BitAccess_;
  48321. return *this;
  48322. }
  48323. PhysicalDevice16BitStorageFeatures & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
  48324. {
  48325. uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
  48326. return *this;
  48327. }
  48328. PhysicalDevice16BitStorageFeatures & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
  48329. {
  48330. storagePushConstant16 = storagePushConstant16_;
  48331. return *this;
  48332. }
  48333. PhysicalDevice16BitStorageFeatures & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
  48334. {
  48335. storageInputOutput16 = storageInputOutput16_;
  48336. return *this;
  48337. }
  48338. operator VkPhysicalDevice16BitStorageFeatures const&() const VULKAN_HPP_NOEXCEPT
  48339. {
  48340. return *reinterpret_cast<const VkPhysicalDevice16BitStorageFeatures*>( this );
  48341. }
  48342. operator VkPhysicalDevice16BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
  48343. {
  48344. return *reinterpret_cast<VkPhysicalDevice16BitStorageFeatures*>( this );
  48345. }
  48346. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  48347. auto operator<=>( PhysicalDevice16BitStorageFeatures const& ) const = default;
  48348. #else
  48349. bool operator==( PhysicalDevice16BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  48350. {
  48351. return ( sType == rhs.sType )
  48352. && ( pNext == rhs.pNext )
  48353. && ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess )
  48354. && ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess )
  48355. && ( storagePushConstant16 == rhs.storagePushConstant16 )
  48356. && ( storageInputOutput16 == rhs.storageInputOutput16 );
  48357. }
  48358. bool operator!=( PhysicalDevice16BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  48359. {
  48360. return !operator==( rhs );
  48361. }
  48362. #endif
  48363. public:
  48364. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice16BitStorageFeatures;
  48365. void* pNext = {};
  48366. VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
  48367. VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
  48368. VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
  48369. VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
  48370. };
  48371. static_assert( sizeof( PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ), "struct and wrapper have different size!" );
  48372. static_assert( std::is_standard_layout<PhysicalDevice16BitStorageFeatures>::value, "struct wrapper is not a standard layout!" );
  48373. template <>
  48374. struct CppType<StructureType, StructureType::ePhysicalDevice16BitStorageFeatures>
  48375. {
  48376. using Type = PhysicalDevice16BitStorageFeatures;
  48377. };
  48378. using PhysicalDevice16BitStorageFeaturesKHR = PhysicalDevice16BitStorageFeatures;
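// Illustrative usage sketch (not part of the generated header): querying 16-bit storage support
// by pulling the feature struct out of a getFeatures2 StructureChain; the same struct can later
// be chained into DeviceCreateInfo::pNext to enable the supported features at device creation.
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDevice16BitStorageFeatures>();
//   const auto & storage16 = chain.get<vk::PhysicalDevice16BitStorageFeatures>();
//   if ( storage16.storageBuffer16BitAccess )
//   {
//     // 16-bit storage buffer access may be requested when creating the device
//   }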
  48379. struct PhysicalDevice4444FormatsFeaturesEXT
  48380. {
  48381. static const bool allowDuplicate = false;
  48382. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
  48383. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48384. VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ = {}, VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ = {}) VULKAN_HPP_NOEXCEPT
  48385. : formatA4R4G4B4( formatA4R4G4B4_ ), formatA4B4G4R4( formatA4B4G4R4_ )
  48386. {}
  48387. VULKAN_HPP_CONSTEXPR PhysicalDevice4444FormatsFeaturesEXT( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48388. PhysicalDevice4444FormatsFeaturesEXT( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  48389. : PhysicalDevice4444FormatsFeaturesEXT( *reinterpret_cast<PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs ) )
  48390. {}
  48391. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48392. VULKAN_HPP_CONSTEXPR_14 PhysicalDevice4444FormatsFeaturesEXT & operator=( PhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48393. PhysicalDevice4444FormatsFeaturesEXT & operator=( VkPhysicalDevice4444FormatsFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  48394. {
  48395. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT const *>( &rhs );
  48396. return *this;
  48397. }
  48398. PhysicalDevice4444FormatsFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  48399. {
  48400. pNext = pNext_;
  48401. return *this;
  48402. }
  48403. PhysicalDevice4444FormatsFeaturesEXT & setFormatA4R4G4B4( VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4_ ) VULKAN_HPP_NOEXCEPT
  48404. {
  48405. formatA4R4G4B4 = formatA4R4G4B4_;
  48406. return *this;
  48407. }
  48408. PhysicalDevice4444FormatsFeaturesEXT & setFormatA4B4G4R4( VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4_ ) VULKAN_HPP_NOEXCEPT
  48409. {
  48410. formatA4B4G4R4 = formatA4B4G4R4_;
  48411. return *this;
  48412. }
  48413. operator VkPhysicalDevice4444FormatsFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  48414. {
  48415. return *reinterpret_cast<const VkPhysicalDevice4444FormatsFeaturesEXT*>( this );
  48416. }
  48417. operator VkPhysicalDevice4444FormatsFeaturesEXT &() VULKAN_HPP_NOEXCEPT
  48418. {
  48419. return *reinterpret_cast<VkPhysicalDevice4444FormatsFeaturesEXT*>( this );
  48420. }
  48421. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  48422. auto operator<=>( PhysicalDevice4444FormatsFeaturesEXT const& ) const = default;
  48423. #else
  48424. bool operator==( PhysicalDevice4444FormatsFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  48425. {
  48426. return ( sType == rhs.sType )
  48427. && ( pNext == rhs.pNext )
  48428. && ( formatA4R4G4B4 == rhs.formatA4R4G4B4 )
  48429. && ( formatA4B4G4R4 == rhs.formatA4B4G4R4 );
  48430. }
  48431. bool operator!=( PhysicalDevice4444FormatsFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  48432. {
  48433. return !operator==( rhs );
  48434. }
  48435. #endif
  48436. public:
  48437. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice4444FormatsFeaturesEXT;
  48438. void* pNext = {};
  48439. VULKAN_HPP_NAMESPACE::Bool32 formatA4R4G4B4 = {};
  48440. VULKAN_HPP_NAMESPACE::Bool32 formatA4B4G4R4 = {};
  48441. };
  48442. static_assert( sizeof( PhysicalDevice4444FormatsFeaturesEXT ) == sizeof( VkPhysicalDevice4444FormatsFeaturesEXT ), "struct and wrapper have different size!" );
  48443. static_assert( std::is_standard_layout<PhysicalDevice4444FormatsFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  48444. template <>
  48445. struct CppType<StructureType, StructureType::ePhysicalDevice4444FormatsFeaturesEXT>
  48446. {
  48447. using Type = PhysicalDevice4444FormatsFeaturesEXT;
  48448. };
  48449. struct PhysicalDevice8BitStorageFeatures
  48450. {
  48451. static const bool allowDuplicate = false;
  48452. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevice8BitStorageFeatures;
  48453. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48454. VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}) VULKAN_HPP_NOEXCEPT
  48455. : storageBuffer8BitAccess( storageBuffer8BitAccess_ ), uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ), storagePushConstant8( storagePushConstant8_ )
  48456. {}
  48457. VULKAN_HPP_CONSTEXPR PhysicalDevice8BitStorageFeatures( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48458. PhysicalDevice8BitStorageFeatures( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  48459. : PhysicalDevice8BitStorageFeatures( *reinterpret_cast<PhysicalDevice8BitStorageFeatures const *>( &rhs ) )
  48460. {}
  48461. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48462. VULKAN_HPP_CONSTEXPR_14 PhysicalDevice8BitStorageFeatures & operator=( PhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48463. PhysicalDevice8BitStorageFeatures & operator=( VkPhysicalDevice8BitStorageFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  48464. {
  48465. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures const *>( &rhs );
  48466. return *this;
  48467. }
  48468. PhysicalDevice8BitStorageFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  48469. {
  48470. pNext = pNext_;
  48471. return *this;
  48472. }
  48473. PhysicalDevice8BitStorageFeatures & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
  48474. {
  48475. storageBuffer8BitAccess = storageBuffer8BitAccess_;
  48476. return *this;
  48477. }
  48478. PhysicalDevice8BitStorageFeatures & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
  48479. {
  48480. uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
  48481. return *this;
  48482. }
  48483. PhysicalDevice8BitStorageFeatures & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
  48484. {
  48485. storagePushConstant8 = storagePushConstant8_;
  48486. return *this;
  48487. }
  48488. operator VkPhysicalDevice8BitStorageFeatures const&() const VULKAN_HPP_NOEXCEPT
  48489. {
  48490. return *reinterpret_cast<const VkPhysicalDevice8BitStorageFeatures*>( this );
  48491. }
  48492. operator VkPhysicalDevice8BitStorageFeatures &() VULKAN_HPP_NOEXCEPT
  48493. {
  48494. return *reinterpret_cast<VkPhysicalDevice8BitStorageFeatures*>( this );
  48495. }
  48496. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  48497. auto operator<=>( PhysicalDevice8BitStorageFeatures const& ) const = default;
  48498. #else
  48499. bool operator==( PhysicalDevice8BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  48500. {
  48501. return ( sType == rhs.sType )
  48502. && ( pNext == rhs.pNext )
  48503. && ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess )
  48504. && ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess )
  48505. && ( storagePushConstant8 == rhs.storagePushConstant8 );
  48506. }
  48507. bool operator!=( PhysicalDevice8BitStorageFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  48508. {
  48509. return !operator==( rhs );
  48510. }
  48511. #endif
  48512. public:
  48513. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevice8BitStorageFeatures;
  48514. void* pNext = {};
  48515. VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
  48516. VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
  48517. VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
  48518. };
  48519. static_assert( sizeof( PhysicalDevice8BitStorageFeatures ) == sizeof( VkPhysicalDevice8BitStorageFeatures ), "struct and wrapper have different size!" );
  48520. static_assert( std::is_standard_layout<PhysicalDevice8BitStorageFeatures>::value, "struct wrapper is not a standard layout!" );
  48521. template <>
  48522. struct CppType<StructureType, StructureType::ePhysicalDevice8BitStorageFeatures>
  48523. {
  48524. using Type = PhysicalDevice8BitStorageFeatures;
  48525. };
  48526. using PhysicalDevice8BitStorageFeaturesKHR = PhysicalDevice8BitStorageFeatures;
  48527. struct PhysicalDeviceASTCDecodeFeaturesEXT
  48528. {
  48529. static const bool allowDuplicate = false;
  48530. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
  48531. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48532. VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ = {}) VULKAN_HPP_NOEXCEPT
  48533. : decodeModeSharedExponent( decodeModeSharedExponent_ )
  48534. {}
  48535. VULKAN_HPP_CONSTEXPR PhysicalDeviceASTCDecodeFeaturesEXT( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48536. PhysicalDeviceASTCDecodeFeaturesEXT( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  48537. : PhysicalDeviceASTCDecodeFeaturesEXT( *reinterpret_cast<PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs ) )
  48538. {}
  48539. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48540. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceASTCDecodeFeaturesEXT & operator=( PhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48541. PhysicalDeviceASTCDecodeFeaturesEXT & operator=( VkPhysicalDeviceASTCDecodeFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  48542. {
  48543. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT const *>( &rhs );
  48544. return *this;
  48545. }
  48546. PhysicalDeviceASTCDecodeFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  48547. {
  48548. pNext = pNext_;
  48549. return *this;
  48550. }
  48551. PhysicalDeviceASTCDecodeFeaturesEXT & setDecodeModeSharedExponent( VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent_ ) VULKAN_HPP_NOEXCEPT
  48552. {
  48553. decodeModeSharedExponent = decodeModeSharedExponent_;
  48554. return *this;
  48555. }
  48556. operator VkPhysicalDeviceASTCDecodeFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  48557. {
  48558. return *reinterpret_cast<const VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
  48559. }
  48560. operator VkPhysicalDeviceASTCDecodeFeaturesEXT &() VULKAN_HPP_NOEXCEPT
  48561. {
  48562. return *reinterpret_cast<VkPhysicalDeviceASTCDecodeFeaturesEXT*>( this );
  48563. }
  48564. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  48565. auto operator<=>( PhysicalDeviceASTCDecodeFeaturesEXT const& ) const = default;
  48566. #else
  48567. bool operator==( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  48568. {
  48569. return ( sType == rhs.sType )
  48570. && ( pNext == rhs.pNext )
  48571. && ( decodeModeSharedExponent == rhs.decodeModeSharedExponent );
  48572. }
  48573. bool operator!=( PhysicalDeviceASTCDecodeFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  48574. {
  48575. return !operator==( rhs );
  48576. }
  48577. #endif
  48578. public:
  48579. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT;
  48580. void* pNext = {};
  48581. VULKAN_HPP_NAMESPACE::Bool32 decodeModeSharedExponent = {};
  48582. };
  48583. static_assert( sizeof( PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ), "struct and wrapper have different size!" );
  48584. static_assert( std::is_standard_layout<PhysicalDeviceASTCDecodeFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  48585. template <>
  48586. struct CppType<StructureType, StructureType::ePhysicalDeviceAstcDecodeFeaturesEXT>
  48587. {
  48588. using Type = PhysicalDeviceASTCDecodeFeaturesEXT;
  48589. };
  48590. struct PhysicalDeviceAccelerationStructureFeaturesKHR
  48591. {
  48592. static const bool allowDuplicate = false;
  48593. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR;
  48594. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48595. VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ = {}, VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ = {}, VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ = {}) VULKAN_HPP_NOEXCEPT
  48596. : accelerationStructure( accelerationStructure_ ), accelerationStructureCaptureReplay( accelerationStructureCaptureReplay_ ), accelerationStructureIndirectBuild( accelerationStructureIndirectBuild_ ), accelerationStructureHostCommands( accelerationStructureHostCommands_ ), descriptorBindingAccelerationStructureUpdateAfterBind( descriptorBindingAccelerationStructureUpdateAfterBind_ )
  48597. {}
  48598. VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructureFeaturesKHR( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48599. PhysicalDeviceAccelerationStructureFeaturesKHR( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  48600. : PhysicalDeviceAccelerationStructureFeaturesKHR( *reinterpret_cast<PhysicalDeviceAccelerationStructureFeaturesKHR const *>( &rhs ) )
  48601. {}
  48602. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48603. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( PhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48604. PhysicalDeviceAccelerationStructureFeaturesKHR & operator=( VkPhysicalDeviceAccelerationStructureFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  48605. {
  48606. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR const *>( &rhs );
  48607. return *this;
  48608. }
  48609. PhysicalDeviceAccelerationStructureFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  48610. {
  48611. pNext = pNext_;
  48612. return *this;
  48613. }
  48614. PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructure( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure_ ) VULKAN_HPP_NOEXCEPT
  48615. {
  48616. accelerationStructure = accelerationStructure_;
  48617. return *this;
  48618. }
  48619. PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
  48620. {
  48621. accelerationStructureCaptureReplay = accelerationStructureCaptureReplay_;
  48622. return *this;
  48623. }
  48624. PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureIndirectBuild( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild_ ) VULKAN_HPP_NOEXCEPT
  48625. {
  48626. accelerationStructureIndirectBuild = accelerationStructureIndirectBuild_;
  48627. return *this;
  48628. }
  48629. PhysicalDeviceAccelerationStructureFeaturesKHR & setAccelerationStructureHostCommands( VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands_ ) VULKAN_HPP_NOEXCEPT
  48630. {
  48631. accelerationStructureHostCommands = accelerationStructureHostCommands_;
  48632. return *this;
  48633. }
  48634. PhysicalDeviceAccelerationStructureFeaturesKHR & setDescriptorBindingAccelerationStructureUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
  48635. {
  48636. descriptorBindingAccelerationStructureUpdateAfterBind = descriptorBindingAccelerationStructureUpdateAfterBind_;
  48637. return *this;
  48638. }
  48639. operator VkPhysicalDeviceAccelerationStructureFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
  48640. {
  48641. return *reinterpret_cast<const VkPhysicalDeviceAccelerationStructureFeaturesKHR*>( this );
  48642. }
  48643. operator VkPhysicalDeviceAccelerationStructureFeaturesKHR &() VULKAN_HPP_NOEXCEPT
  48644. {
  48645. return *reinterpret_cast<VkPhysicalDeviceAccelerationStructureFeaturesKHR*>( this );
  48646. }
  48647. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  48648. auto operator<=>( PhysicalDeviceAccelerationStructureFeaturesKHR const& ) const = default;
  48649. #else
  48650. bool operator==( PhysicalDeviceAccelerationStructureFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  48651. {
  48652. return ( sType == rhs.sType )
  48653. && ( pNext == rhs.pNext )
  48654. && ( accelerationStructure == rhs.accelerationStructure )
  48655. && ( accelerationStructureCaptureReplay == rhs.accelerationStructureCaptureReplay )
  48656. && ( accelerationStructureIndirectBuild == rhs.accelerationStructureIndirectBuild )
  48657. && ( accelerationStructureHostCommands == rhs.accelerationStructureHostCommands )
  48658. && ( descriptorBindingAccelerationStructureUpdateAfterBind == rhs.descriptorBindingAccelerationStructureUpdateAfterBind );
  48659. }
  48660. bool operator!=( PhysicalDeviceAccelerationStructureFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  48661. {
  48662. return !operator==( rhs );
  48663. }
  48664. #endif
  48665. public:
  48666. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR;
  48667. void* pNext = {};
  48668. VULKAN_HPP_NAMESPACE::Bool32 accelerationStructure = {};
  48669. VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureCaptureReplay = {};
  48670. VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureIndirectBuild = {};
  48671. VULKAN_HPP_NAMESPACE::Bool32 accelerationStructureHostCommands = {};
  48672. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingAccelerationStructureUpdateAfterBind = {};
  48673. };
  48674. static_assert( sizeof( PhysicalDeviceAccelerationStructureFeaturesKHR ) == sizeof( VkPhysicalDeviceAccelerationStructureFeaturesKHR ), "struct and wrapper have different size!" );
  48675. static_assert( std::is_standard_layout<PhysicalDeviceAccelerationStructureFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
  48676. template <>
  48677. struct CppType<StructureType, StructureType::ePhysicalDeviceAccelerationStructureFeaturesKHR>
  48678. {
  48679. using Type = PhysicalDeviceAccelerationStructureFeaturesKHR;
  48680. };
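// Illustrative sketch (comments only): the fluent setters above let the feature struct be filled
// in-line and chained into vk::DeviceCreateInfo::pNext when creating a device with
// VK_KHR_acceleration_structure enabled. `deviceCreateInfo` is an assumed, already-populated
// vk::DeviceCreateInfo with nothing else chained on its pNext.
//
//   auto asFeatures = vk::PhysicalDeviceAccelerationStructureFeaturesKHR()
//                       .setAccelerationStructure( VK_TRUE )
//                       .setDescriptorBindingAccelerationStructureUpdateAfterBind( VK_TRUE );
//   deviceCreateInfo.setPNext( &asFeatures );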
  48681. struct PhysicalDeviceAccelerationStructurePropertiesKHR
  48682. {
  48683. static const bool allowDuplicate = false;
  48684. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR;
  48685. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48686. VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR(uint64_t maxGeometryCount_ = {}, uint64_t maxInstanceCount_ = {}, uint64_t maxPrimitiveCount_ = {}, uint32_t maxPerStageDescriptorAccelerationStructures_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ = {}, uint32_t maxDescriptorSetAccelerationStructures_ = {}, uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures_ = {}, uint32_t minAccelerationStructureScratchOffsetAlignment_ = {}) VULKAN_HPP_NOEXCEPT
  48687. : maxGeometryCount( maxGeometryCount_ ), maxInstanceCount( maxInstanceCount_ ), maxPrimitiveCount( maxPrimitiveCount_ ), maxPerStageDescriptorAccelerationStructures( maxPerStageDescriptorAccelerationStructures_ ), maxPerStageDescriptorUpdateAfterBindAccelerationStructures( maxPerStageDescriptorUpdateAfterBindAccelerationStructures_ ), maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ ), maxDescriptorSetUpdateAfterBindAccelerationStructures( maxDescriptorSetUpdateAfterBindAccelerationStructures_ ), minAccelerationStructureScratchOffsetAlignment( minAccelerationStructureScratchOffsetAlignment_ )
  48688. {}
  48689. VULKAN_HPP_CONSTEXPR PhysicalDeviceAccelerationStructurePropertiesKHR( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48690. PhysicalDeviceAccelerationStructurePropertiesKHR( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  48691. : PhysicalDeviceAccelerationStructurePropertiesKHR( *reinterpret_cast<PhysicalDeviceAccelerationStructurePropertiesKHR const *>( &rhs ) )
  48692. {}
  48693. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48694. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( PhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48695. PhysicalDeviceAccelerationStructurePropertiesKHR & operator=( VkPhysicalDeviceAccelerationStructurePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  48696. {
  48697. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR const *>( &rhs );
  48698. return *this;
  48699. }
  48700. operator VkPhysicalDeviceAccelerationStructurePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
  48701. {
  48702. return *reinterpret_cast<const VkPhysicalDeviceAccelerationStructurePropertiesKHR*>( this );
  48703. }
  48704. operator VkPhysicalDeviceAccelerationStructurePropertiesKHR &() VULKAN_HPP_NOEXCEPT
  48705. {
  48706. return *reinterpret_cast<VkPhysicalDeviceAccelerationStructurePropertiesKHR*>( this );
  48707. }
  48708. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  48709. auto operator<=>( PhysicalDeviceAccelerationStructurePropertiesKHR const& ) const = default;
  48710. #else
  48711. bool operator==( PhysicalDeviceAccelerationStructurePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  48712. {
  48713. return ( sType == rhs.sType )
  48714. && ( pNext == rhs.pNext )
  48715. && ( maxGeometryCount == rhs.maxGeometryCount )
  48716. && ( maxInstanceCount == rhs.maxInstanceCount )
  48717. && ( maxPrimitiveCount == rhs.maxPrimitiveCount )
  48718. && ( maxPerStageDescriptorAccelerationStructures == rhs.maxPerStageDescriptorAccelerationStructures )
  48719. && ( maxPerStageDescriptorUpdateAfterBindAccelerationStructures == rhs.maxPerStageDescriptorUpdateAfterBindAccelerationStructures )
  48720. && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures )
  48721. && ( maxDescriptorSetUpdateAfterBindAccelerationStructures == rhs.maxDescriptorSetUpdateAfterBindAccelerationStructures )
  48722. && ( minAccelerationStructureScratchOffsetAlignment == rhs.minAccelerationStructureScratchOffsetAlignment );
  48723. }
  48724. bool operator!=( PhysicalDeviceAccelerationStructurePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  48725. {
  48726. return !operator==( rhs );
  48727. }
  48728. #endif
  48729. public:
  48730. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR;
  48731. void* pNext = {};
  48732. uint64_t maxGeometryCount = {};
  48733. uint64_t maxInstanceCount = {};
  48734. uint64_t maxPrimitiveCount = {};
  48735. uint32_t maxPerStageDescriptorAccelerationStructures = {};
  48736. uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures = {};
  48737. uint32_t maxDescriptorSetAccelerationStructures = {};
  48738. uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures = {};
  48739. uint32_t minAccelerationStructureScratchOffsetAlignment = {};
  48740. };
  48741. static_assert( sizeof( PhysicalDeviceAccelerationStructurePropertiesKHR ) == sizeof( VkPhysicalDeviceAccelerationStructurePropertiesKHR ), "struct and wrapper have different size!" );
  48742. static_assert( std::is_standard_layout<PhysicalDeviceAccelerationStructurePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
  48743. template <>
  48744. struct CppType<StructureType, StructureType::ePhysicalDeviceAccelerationStructurePropertiesKHR>
  48745. {
  48746. using Type = PhysicalDeviceAccelerationStructurePropertiesKHR;
  48747. };
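// Illustrative sketch (comments only): properties structs carry no setters because they are filled
// by the implementation. A typical query, with `physicalDevice` an assumed vk::PhysicalDevice and
// `rawOffset` an assumed byte offset, followed by rounding a scratch-buffer offset up to the
// reported minimum alignment:
//
//   vk::PhysicalDeviceAccelerationStructurePropertiesKHR asProps;
//   vk::PhysicalDeviceProperties2 props2;
//   props2.pNext = &asProps;
//   physicalDevice.getProperties2( &props2 );
//   vk::DeviceSize alignment     = asProps.minAccelerationStructureScratchOffsetAlignment;
//   vk::DeviceSize scratchOffset = ( ( rawOffset + alignment - 1 ) / alignment ) * alignment;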
  48748. struct PhysicalDeviceBlendOperationAdvancedFeaturesEXT
  48749. {
  48750. static const bool allowDuplicate = false;
  48751. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
  48752. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48753. VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ = {}) VULKAN_HPP_NOEXCEPT
  48754. : advancedBlendCoherentOperations( advancedBlendCoherentOperations_ )
  48755. {}
  48756. VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedFeaturesEXT( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48757. PhysicalDeviceBlendOperationAdvancedFeaturesEXT( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  48758. : PhysicalDeviceBlendOperationAdvancedFeaturesEXT( *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs ) )
  48759. {}
  48760. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48761. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48762. PhysicalDeviceBlendOperationAdvancedFeaturesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  48763. {
  48764. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT const *>( &rhs );
  48765. return *this;
  48766. }
  48767. PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  48768. {
  48769. pNext = pNext_;
  48770. return *this;
  48771. }
  48772. PhysicalDeviceBlendOperationAdvancedFeaturesEXT & setAdvancedBlendCoherentOperations( VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations_ ) VULKAN_HPP_NOEXCEPT
  48773. {
  48774. advancedBlendCoherentOperations = advancedBlendCoherentOperations_;
  48775. return *this;
  48776. }
  48777. operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  48778. {
  48779. return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
  48780. }
  48781. operator VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT &() VULKAN_HPP_NOEXCEPT
  48782. {
  48783. return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT*>( this );
  48784. }
  48785. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  48786. auto operator<=>( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& ) const = default;
  48787. #else
  48788. bool operator==( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  48789. {
  48790. return ( sType == rhs.sType )
  48791. && ( pNext == rhs.pNext )
  48792. && ( advancedBlendCoherentOperations == rhs.advancedBlendCoherentOperations );
  48793. }
  48794. bool operator!=( PhysicalDeviceBlendOperationAdvancedFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  48795. {
  48796. return !operator==( rhs );
  48797. }
  48798. #endif
  48799. public:
  48800. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT;
  48801. void* pNext = {};
  48802. VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCoherentOperations = {};
  48803. };
  48804. static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ), "struct and wrapper have different size!" );
  48805. static_assert( std::is_standard_layout<PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  48806. template <>
  48807. struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedFeaturesEXT>
  48808. {
  48809. using Type = PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
  48810. };
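// Illustrative sketch (comments only): the same feature query can also be written with
// vk::StructureChain, which wires up the pNext pointers automatically. `physicalDevice` is an
// assumed vk::PhysicalDevice.
//
//   auto chain = physicalDevice.getFeatures2< vk::PhysicalDeviceFeatures2,
//                                             vk::PhysicalDeviceBlendOperationAdvancedFeaturesEXT >();
//   bool coherentBlend =
//     chain.get<vk::PhysicalDeviceBlendOperationAdvancedFeaturesEXT>().advancedBlendCoherentOperations;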
  48811. struct PhysicalDeviceBlendOperationAdvancedPropertiesEXT
  48812. {
  48813. static const bool allowDuplicate = false;
  48814. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
  48815. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48816. VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT(uint32_t advancedBlendMaxColorAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations_ = {}) VULKAN_HPP_NOEXCEPT
  48817. : advancedBlendMaxColorAttachments( advancedBlendMaxColorAttachments_ ), advancedBlendIndependentBlend( advancedBlendIndependentBlend_ ), advancedBlendNonPremultipliedSrcColor( advancedBlendNonPremultipliedSrcColor_ ), advancedBlendNonPremultipliedDstColor( advancedBlendNonPremultipliedDstColor_ ), advancedBlendCorrelatedOverlap( advancedBlendCorrelatedOverlap_ ), advancedBlendAllOperations( advancedBlendAllOperations_ )
  48818. {}
  48819. VULKAN_HPP_CONSTEXPR PhysicalDeviceBlendOperationAdvancedPropertiesEXT( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48820. PhysicalDeviceBlendOperationAdvancedPropertiesEXT( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  48821. : PhysicalDeviceBlendOperationAdvancedPropertiesEXT( *reinterpret_cast<PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs ) )
  48822. {}
  48823. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48824. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48825. PhysicalDeviceBlendOperationAdvancedPropertiesEXT & operator=( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  48826. {
  48827. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT const *>( &rhs );
  48828. return *this;
  48829. }
  48830. operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
  48831. {
  48832. return *reinterpret_cast<const VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
  48833. }
  48834. operator VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT &() VULKAN_HPP_NOEXCEPT
  48835. {
  48836. return *reinterpret_cast<VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT*>( this );
  48837. }
  48838. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  48839. auto operator<=>( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& ) const = default;
  48840. #else
  48841. bool operator==( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  48842. {
  48843. return ( sType == rhs.sType )
  48844. && ( pNext == rhs.pNext )
  48845. && ( advancedBlendMaxColorAttachments == rhs.advancedBlendMaxColorAttachments )
  48846. && ( advancedBlendIndependentBlend == rhs.advancedBlendIndependentBlend )
  48847. && ( advancedBlendNonPremultipliedSrcColor == rhs.advancedBlendNonPremultipliedSrcColor )
  48848. && ( advancedBlendNonPremultipliedDstColor == rhs.advancedBlendNonPremultipliedDstColor )
  48849. && ( advancedBlendCorrelatedOverlap == rhs.advancedBlendCorrelatedOverlap )
  48850. && ( advancedBlendAllOperations == rhs.advancedBlendAllOperations );
  48851. }
  48852. bool operator!=( PhysicalDeviceBlendOperationAdvancedPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  48853. {
  48854. return !operator==( rhs );
  48855. }
  48856. #endif
  48857. public:
  48858. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT;
  48859. void* pNext = {};
  48860. uint32_t advancedBlendMaxColorAttachments = {};
  48861. VULKAN_HPP_NAMESPACE::Bool32 advancedBlendIndependentBlend = {};
  48862. VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedSrcColor = {};
  48863. VULKAN_HPP_NAMESPACE::Bool32 advancedBlendNonPremultipliedDstColor = {};
  48864. VULKAN_HPP_NAMESPACE::Bool32 advancedBlendCorrelatedOverlap = {};
  48865. VULKAN_HPP_NAMESPACE::Bool32 advancedBlendAllOperations = {};
  48866. };
  48867. static_assert( sizeof( PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) == sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ), "struct and wrapper have different size!" );
  48868. static_assert( std::is_standard_layout<PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  48869. template <>
  48870. struct CppType<StructureType, StructureType::ePhysicalDeviceBlendOperationAdvancedPropertiesEXT>
  48871. {
  48872. using Type = PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
  48873. };
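// Illustrative sketch (comments only): a pipeline that uses an advanced blend operation must not
// use more color attachments than advancedBlendMaxColorAttachments. `blendProps` is an assumed,
// already-queried vk::PhysicalDeviceBlendOperationAdvancedPropertiesEXT and
// `colorAttachmentCount` an assumed subpass attachment count.
//
//   assert( colorAttachmentCount <= blendProps.advancedBlendMaxColorAttachments );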
  48874. struct PhysicalDeviceBufferDeviceAddressFeatures
  48875. {
  48876. static const bool allowDuplicate = false;
  48877. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
  48878. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48879. VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}) VULKAN_HPP_NOEXCEPT
  48880. : bufferDeviceAddress( bufferDeviceAddress_ ), bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ), bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
  48881. {}
  48882. VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeatures( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48883. PhysicalDeviceBufferDeviceAddressFeatures( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  48884. : PhysicalDeviceBufferDeviceAddressFeatures( *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs ) )
  48885. {}
  48886. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48887. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeatures & operator=( PhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48888. PhysicalDeviceBufferDeviceAddressFeatures & operator=( VkPhysicalDeviceBufferDeviceAddressFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  48889. {
  48890. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures const *>( &rhs );
  48891. return *this;
  48892. }
  48893. PhysicalDeviceBufferDeviceAddressFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  48894. {
  48895. pNext = pNext_;
  48896. return *this;
  48897. }
  48898. PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
  48899. {
  48900. bufferDeviceAddress = bufferDeviceAddress_;
  48901. return *this;
  48902. }
  48903. PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
  48904. {
  48905. bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
  48906. return *this;
  48907. }
  48908. PhysicalDeviceBufferDeviceAddressFeatures & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
  48909. {
  48910. bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
  48911. return *this;
  48912. }
  48913. operator VkPhysicalDeviceBufferDeviceAddressFeatures const&() const VULKAN_HPP_NOEXCEPT
  48914. {
  48915. return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeatures*>( this );
  48916. }
  48917. operator VkPhysicalDeviceBufferDeviceAddressFeatures &() VULKAN_HPP_NOEXCEPT
  48918. {
  48919. return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeatures*>( this );
  48920. }
  48921. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  48922. auto operator<=>( PhysicalDeviceBufferDeviceAddressFeatures const& ) const = default;
  48923. #else
  48924. bool operator==( PhysicalDeviceBufferDeviceAddressFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  48925. {
  48926. return ( sType == rhs.sType )
  48927. && ( pNext == rhs.pNext )
  48928. && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
  48929. && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
  48930. && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
  48931. }
  48932. bool operator!=( PhysicalDeviceBufferDeviceAddressFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  48933. {
  48934. return !operator==( rhs );
  48935. }
  48936. #endif
  48937. public:
  48938. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeatures;
  48939. void* pNext = {};
  48940. VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
  48941. VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
  48942. VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
  48943. };
  48944. static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeatures ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeatures ), "struct and wrapper have different size!" );
  48945. static_assert( std::is_standard_layout<PhysicalDeviceBufferDeviceAddressFeatures>::value, "struct wrapper is not a standard layout!" );
  48946. template <>
  48947. struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeatures>
  48948. {
  48949. using Type = PhysicalDeviceBufferDeviceAddressFeatures;
  48950. };
  48951. using PhysicalDeviceBufferDeviceAddressFeaturesKHR = PhysicalDeviceBufferDeviceAddressFeatures;
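// Illustrative sketch (comments only): bufferDeviceAddress must be enabled at device creation
// (via this struct or PhysicalDeviceVulkan12Features on the vk::DeviceCreateInfo pNext chain)
// before a buffer created with vk::BufferUsageFlagBits::eShaderDeviceAddress can be queried for a
// device address. `device` and `buffer` are assumed handles.
//
//   vk::DeviceAddress address =
//     device.getBufferAddress( vk::BufferDeviceAddressInfo().setBuffer( buffer ) );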
  48952. struct PhysicalDeviceBufferDeviceAddressFeaturesEXT
  48953. {
  48954. static const bool allowDuplicate = false;
  48955. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
  48956. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48957. VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}) VULKAN_HPP_NOEXCEPT
  48958. : bufferDeviceAddress( bufferDeviceAddress_ ), bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ), bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ )
  48959. {}
  48960. VULKAN_HPP_CONSTEXPR PhysicalDeviceBufferDeviceAddressFeaturesEXT( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48961. PhysicalDeviceBufferDeviceAddressFeaturesEXT( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  48962. : PhysicalDeviceBufferDeviceAddressFeaturesEXT( *reinterpret_cast<PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( &rhs ) )
  48963. {}
  48964. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  48965. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  48966. PhysicalDeviceBufferDeviceAddressFeaturesEXT & operator=( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  48967. {
  48968. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT const *>( &rhs );
  48969. return *this;
  48970. }
  48971. PhysicalDeviceBufferDeviceAddressFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  48972. {
  48973. pNext = pNext_;
  48974. return *this;
  48975. }
  48976. PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
  48977. {
  48978. bufferDeviceAddress = bufferDeviceAddress_;
  48979. return *this;
  48980. }
  48981. PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
  48982. {
  48983. bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
  48984. return *this;
  48985. }
  48986. PhysicalDeviceBufferDeviceAddressFeaturesEXT & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
  48987. {
  48988. bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
  48989. return *this;
  48990. }
  48991. operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  48992. {
  48993. return *reinterpret_cast<const VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
  48994. }
  48995. operator VkPhysicalDeviceBufferDeviceAddressFeaturesEXT &() VULKAN_HPP_NOEXCEPT
  48996. {
  48997. return *reinterpret_cast<VkPhysicalDeviceBufferDeviceAddressFeaturesEXT*>( this );
  48998. }
  48999. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  49000. auto operator<=>( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& ) const = default;
  49001. #else
  49002. bool operator==( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  49003. {
  49004. return ( sType == rhs.sType )
  49005. && ( pNext == rhs.pNext )
  49006. && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
  49007. && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
  49008. && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice );
  49009. }
  49010. bool operator!=( PhysicalDeviceBufferDeviceAddressFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  49011. {
  49012. return !operator==( rhs );
  49013. }
  49014. #endif
  49015. public:
  49016. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT;
  49017. void* pNext = {};
  49018. VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
  49019. VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
  49020. VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
  49021. };
  49022. static_assert( sizeof( PhysicalDeviceBufferDeviceAddressFeaturesEXT ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ), "struct and wrapper have different size!" );
  49023. static_assert( std::is_standard_layout<PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  49024. template <>
  49025. struct CppType<StructureType, StructureType::ePhysicalDeviceBufferDeviceAddressFeaturesEXT>
  49026. {
  49027. using Type = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
  49028. };
  49029. using PhysicalDeviceBufferAddressFeaturesEXT = PhysicalDeviceBufferDeviceAddressFeaturesEXT;
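// Note: despite the identical members, this EXT struct (from VK_EXT_buffer_device_address) and
// PhysicalDeviceBufferDeviceAddressFeatures above (core Vulkan 1.2 / VK_KHR_buffer_device_address)
// are distinct structures with different sType values; the struct matching the extension actually
// enabled must be the one chained into vk::DeviceCreateInfo.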
  49030. struct PhysicalDeviceCoherentMemoryFeaturesAMD
  49031. {
  49032. static const bool allowDuplicate = false;
  49033. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
  49034. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49035. VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD(VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ = {}) VULKAN_HPP_NOEXCEPT
  49036. : deviceCoherentMemory( deviceCoherentMemory_ )
  49037. {}
  49038. VULKAN_HPP_CONSTEXPR PhysicalDeviceCoherentMemoryFeaturesAMD( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49039. PhysicalDeviceCoherentMemoryFeaturesAMD( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
  49040. : PhysicalDeviceCoherentMemoryFeaturesAMD( *reinterpret_cast<PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs ) )
  49041. {}
  49042. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49043. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( PhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49044. PhysicalDeviceCoherentMemoryFeaturesAMD & operator=( VkPhysicalDeviceCoherentMemoryFeaturesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
  49045. {
  49046. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD const *>( &rhs );
  49047. return *this;
  49048. }
  49049. PhysicalDeviceCoherentMemoryFeaturesAMD & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  49050. {
  49051. pNext = pNext_;
  49052. return *this;
  49053. }
  49054. PhysicalDeviceCoherentMemoryFeaturesAMD & setDeviceCoherentMemory( VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory_ ) VULKAN_HPP_NOEXCEPT
  49055. {
  49056. deviceCoherentMemory = deviceCoherentMemory_;
  49057. return *this;
  49058. }
  49059. operator VkPhysicalDeviceCoherentMemoryFeaturesAMD const&() const VULKAN_HPP_NOEXCEPT
  49060. {
  49061. return *reinterpret_cast<const VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
  49062. }
  49063. operator VkPhysicalDeviceCoherentMemoryFeaturesAMD &() VULKAN_HPP_NOEXCEPT
  49064. {
  49065. return *reinterpret_cast<VkPhysicalDeviceCoherentMemoryFeaturesAMD*>( this );
  49066. }
  49067. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  49068. auto operator<=>( PhysicalDeviceCoherentMemoryFeaturesAMD const& ) const = default;
  49069. #else
  49070. bool operator==( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
  49071. {
  49072. return ( sType == rhs.sType )
  49073. && ( pNext == rhs.pNext )
  49074. && ( deviceCoherentMemory == rhs.deviceCoherentMemory );
  49075. }
  49076. bool operator!=( PhysicalDeviceCoherentMemoryFeaturesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
  49077. {
  49078. return !operator==( rhs );
  49079. }
  49080. #endif
  49081. public:
  49082. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD;
  49083. void* pNext = {};
  49084. VULKAN_HPP_NAMESPACE::Bool32 deviceCoherentMemory = {};
  49085. };
  49086. static_assert( sizeof( PhysicalDeviceCoherentMemoryFeaturesAMD ) == sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ), "struct and wrapper have different size!" );
  49087. static_assert( std::is_standard_layout<PhysicalDeviceCoherentMemoryFeaturesAMD>::value, "struct wrapper is not a standard layout!" );
  49088. template <>
  49089. struct CppType<StructureType, StructureType::ePhysicalDeviceCoherentMemoryFeaturesAMD>
  49090. {
  49091. using Type = PhysicalDeviceCoherentMemoryFeaturesAMD;
  49092. };
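// Illustrative sketch (comments only): when deviceCoherentMemory is supported and enabled, memory
// types carrying vk::MemoryPropertyFlagBits::eDeviceCoherentAMD (and optionally eDeviceUncachedAMD)
// become usable. `memoryType` is an assumed vk::MemoryType taken from getMemoryProperties().
//
//   bool isDeviceCoherent =
//     static_cast<bool>( memoryType.propertyFlags & vk::MemoryPropertyFlagBits::eDeviceCoherentAMD );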
  49093. struct PhysicalDeviceComputeShaderDerivativesFeaturesNV
  49094. {
  49095. static const bool allowDuplicate = false;
  49096. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
  49097. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49098. VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ = {}) VULKAN_HPP_NOEXCEPT
  49099. : computeDerivativeGroupQuads( computeDerivativeGroupQuads_ ), computeDerivativeGroupLinear( computeDerivativeGroupLinear_ )
  49100. {}
  49101. VULKAN_HPP_CONSTEXPR PhysicalDeviceComputeShaderDerivativesFeaturesNV( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49102. PhysicalDeviceComputeShaderDerivativesFeaturesNV( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  49103. : PhysicalDeviceComputeShaderDerivativesFeaturesNV( *reinterpret_cast<PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>( &rhs ) )
  49104. {}
  49105. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49106. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49107. PhysicalDeviceComputeShaderDerivativesFeaturesNV & operator=( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  49108. {
  49109. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV const *>( &rhs );
  49110. return *this;
  49111. }
  49112. PhysicalDeviceComputeShaderDerivativesFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  49113. {
  49114. pNext = pNext_;
  49115. return *this;
  49116. }
  49117. PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupQuads( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads_ ) VULKAN_HPP_NOEXCEPT
  49118. {
  49119. computeDerivativeGroupQuads = computeDerivativeGroupQuads_;
  49120. return *this;
  49121. }
  49122. PhysicalDeviceComputeShaderDerivativesFeaturesNV & setComputeDerivativeGroupLinear( VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear_ ) VULKAN_HPP_NOEXCEPT
  49123. {
  49124. computeDerivativeGroupLinear = computeDerivativeGroupLinear_;
  49125. return *this;
  49126. }
  49127. operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
  49128. {
  49129. return *reinterpret_cast<const VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>( this );
  49130. }
  49131. operator VkPhysicalDeviceComputeShaderDerivativesFeaturesNV &() VULKAN_HPP_NOEXCEPT
  49132. {
  49133. return *reinterpret_cast<VkPhysicalDeviceComputeShaderDerivativesFeaturesNV*>( this );
  49134. }
  49135. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  49136. auto operator<=>( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& ) const = default;
  49137. #else
  49138. bool operator==( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  49139. {
  49140. return ( sType == rhs.sType )
  49141. && ( pNext == rhs.pNext )
  49142. && ( computeDerivativeGroupQuads == rhs.computeDerivativeGroupQuads )
  49143. && ( computeDerivativeGroupLinear == rhs.computeDerivativeGroupLinear );
  49144. }
  49145. bool operator!=( PhysicalDeviceComputeShaderDerivativesFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  49146. {
  49147. return !operator==( rhs );
  49148. }
  49149. #endif
  49150. public:
  49151. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV;
  49152. void* pNext = {};
  49153. VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupQuads = {};
  49154. VULKAN_HPP_NAMESPACE::Bool32 computeDerivativeGroupLinear = {};
  49155. };
  49156. static_assert( sizeof( PhysicalDeviceComputeShaderDerivativesFeaturesNV ) == sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ), "struct and wrapper have different size!" );
  49157. static_assert( std::is_standard_layout<PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  49158. template <>
  49159. struct CppType<StructureType, StructureType::ePhysicalDeviceComputeShaderDerivativesFeaturesNV>
  49160. {
  49161. using Type = PhysicalDeviceComputeShaderDerivativesFeaturesNV;
  49162. };
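// Note: the two booleans above correspond to the SPIR-V execution modes DerivativeGroupQuadsNV and
// DerivativeGroupLinearNV (GLSL: GL_NV_compute_shader_derivatives); a compute shader may only
// declare the execution mode whose corresponding feature is supported and enabled.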
  49163. struct PhysicalDeviceConditionalRenderingFeaturesEXT
  49164. {
  49165. static const bool allowDuplicate = false;
  49166. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
  49167. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49168. VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ = {}, VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ = {}) VULKAN_HPP_NOEXCEPT
  49169. : conditionalRendering( conditionalRendering_ ), inheritedConditionalRendering( inheritedConditionalRendering_ )
  49170. {}
  49171. VULKAN_HPP_CONSTEXPR PhysicalDeviceConditionalRenderingFeaturesEXT( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49172. PhysicalDeviceConditionalRenderingFeaturesEXT( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  49173. : PhysicalDeviceConditionalRenderingFeaturesEXT( *reinterpret_cast<PhysicalDeviceConditionalRenderingFeaturesEXT const *>( &rhs ) )
  49174. {}
  49175. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49176. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( PhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49177. PhysicalDeviceConditionalRenderingFeaturesEXT & operator=( VkPhysicalDeviceConditionalRenderingFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  49178. {
  49179. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT const *>( &rhs );
  49180. return *this;
  49181. }
  49182. PhysicalDeviceConditionalRenderingFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  49183. {
  49184. pNext = pNext_;
  49185. return *this;
  49186. }
  49187. PhysicalDeviceConditionalRenderingFeaturesEXT & setConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering_ ) VULKAN_HPP_NOEXCEPT
  49188. {
  49189. conditionalRendering = conditionalRendering_;
  49190. return *this;
  49191. }
  49192. PhysicalDeviceConditionalRenderingFeaturesEXT & setInheritedConditionalRendering( VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering_ ) VULKAN_HPP_NOEXCEPT
  49193. {
  49194. inheritedConditionalRendering = inheritedConditionalRendering_;
  49195. return *this;
  49196. }
  49197. operator VkPhysicalDeviceConditionalRenderingFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  49198. {
  49199. return *reinterpret_cast<const VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
  49200. }
  49201. operator VkPhysicalDeviceConditionalRenderingFeaturesEXT &() VULKAN_HPP_NOEXCEPT
  49202. {
  49203. return *reinterpret_cast<VkPhysicalDeviceConditionalRenderingFeaturesEXT*>( this );
  49204. }
  49205. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  49206. auto operator<=>( PhysicalDeviceConditionalRenderingFeaturesEXT const& ) const = default;
  49207. #else
  49208. bool operator==( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  49209. {
  49210. return ( sType == rhs.sType )
  49211. && ( pNext == rhs.pNext )
  49212. && ( conditionalRendering == rhs.conditionalRendering )
  49213. && ( inheritedConditionalRendering == rhs.inheritedConditionalRendering );
  49214. }
  49215. bool operator!=( PhysicalDeviceConditionalRenderingFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  49216. {
  49217. return !operator==( rhs );
  49218. }
  49219. #endif
  49220. public:
  49221. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT;
  49222. void* pNext = {};
  49223. VULKAN_HPP_NAMESPACE::Bool32 conditionalRendering = {};
  49224. VULKAN_HPP_NAMESPACE::Bool32 inheritedConditionalRendering = {};
  49225. };
  49226. static_assert( sizeof( PhysicalDeviceConditionalRenderingFeaturesEXT ) == sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ), "struct and wrapper have different size!" );
  49227. static_assert( std::is_standard_layout<PhysicalDeviceConditionalRenderingFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  49228. template <>
  49229. struct CppType<StructureType, StructureType::ePhysicalDeviceConditionalRenderingFeaturesEXT>
  49230. {
  49231. using Type = PhysicalDeviceConditionalRenderingFeaturesEXT;
  49232. };
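// Illustrative sketch (comments only): inheritedConditionalRendering gates whether a secondary
// command buffer may be executed from inside an active conditional rendering block.
// `inheritanceInfo` is an assumed vk::CommandBufferInheritanceInfo for that secondary command buffer.
//
//   vk::CommandBufferInheritanceConditionalRenderingInfoEXT conditionalInfo;
//   conditionalInfo.conditionalRenderingEnable = VK_TRUE;   // only if the feature is enabled
//   inheritanceInfo.pNext = &conditionalInfo;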
  49233. struct PhysicalDeviceConservativeRasterizationPropertiesEXT
  49234. {
  49235. static const bool allowDuplicate = false;
  49236. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
  49237. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49238. VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT(float primitiveOverestimationSize_ = {}, float maxExtraPrimitiveOverestimationSize_ = {}, float extraPrimitiveOverestimationSizeGranularity_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization_ = {}, VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized_ = {}, VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable_ = {}, VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage_ = {}) VULKAN_HPP_NOEXCEPT
  49239. : primitiveOverestimationSize( primitiveOverestimationSize_ ), maxExtraPrimitiveOverestimationSize( maxExtraPrimitiveOverestimationSize_ ), extraPrimitiveOverestimationSizeGranularity( extraPrimitiveOverestimationSizeGranularity_ ), primitiveUnderestimation( primitiveUnderestimation_ ), conservativePointAndLineRasterization( conservativePointAndLineRasterization_ ), degenerateTrianglesRasterized( degenerateTrianglesRasterized_ ), degenerateLinesRasterized( degenerateLinesRasterized_ ), fullyCoveredFragmentShaderInputVariable( fullyCoveredFragmentShaderInputVariable_ ), conservativeRasterizationPostDepthCoverage( conservativeRasterizationPostDepthCoverage_ )
  49240. {}
  49241. VULKAN_HPP_CONSTEXPR PhysicalDeviceConservativeRasterizationPropertiesEXT( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49242. PhysicalDeviceConservativeRasterizationPropertiesEXT( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  49243. : PhysicalDeviceConservativeRasterizationPropertiesEXT( *reinterpret_cast<PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs ) )
  49244. {}
  49245. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49246. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( PhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49247. PhysicalDeviceConservativeRasterizationPropertiesEXT & operator=( VkPhysicalDeviceConservativeRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  49248. {
  49249. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT const *>( &rhs );
  49250. return *this;
  49251. }
  49252. operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
  49253. {
  49254. return *reinterpret_cast<const VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
  49255. }
  49256. operator VkPhysicalDeviceConservativeRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
  49257. {
  49258. return *reinterpret_cast<VkPhysicalDeviceConservativeRasterizationPropertiesEXT*>( this );
  49259. }
  49260. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  49261. auto operator<=>( PhysicalDeviceConservativeRasterizationPropertiesEXT const& ) const = default;
  49262. #else
  49263. bool operator==( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  49264. {
  49265. return ( sType == rhs.sType )
  49266. && ( pNext == rhs.pNext )
  49267. && ( primitiveOverestimationSize == rhs.primitiveOverestimationSize )
  49268. && ( maxExtraPrimitiveOverestimationSize == rhs.maxExtraPrimitiveOverestimationSize )
  49269. && ( extraPrimitiveOverestimationSizeGranularity == rhs.extraPrimitiveOverestimationSizeGranularity )
  49270. && ( primitiveUnderestimation == rhs.primitiveUnderestimation )
  49271. && ( conservativePointAndLineRasterization == rhs.conservativePointAndLineRasterization )
  49272. && ( degenerateTrianglesRasterized == rhs.degenerateTrianglesRasterized )
  49273. && ( degenerateLinesRasterized == rhs.degenerateLinesRasterized )
  49274. && ( fullyCoveredFragmentShaderInputVariable == rhs.fullyCoveredFragmentShaderInputVariable )
  49275. && ( conservativeRasterizationPostDepthCoverage == rhs.conservativeRasterizationPostDepthCoverage );
  49276. }
  49277. bool operator!=( PhysicalDeviceConservativeRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  49278. {
  49279. return !operator==( rhs );
  49280. }
  49281. #endif
  49282. public:
  49283. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT;
  49284. void* pNext = {};
  49285. float primitiveOverestimationSize = {};
  49286. float maxExtraPrimitiveOverestimationSize = {};
  49287. float extraPrimitiveOverestimationSizeGranularity = {};
  49288. VULKAN_HPP_NAMESPACE::Bool32 primitiveUnderestimation = {};
  49289. VULKAN_HPP_NAMESPACE::Bool32 conservativePointAndLineRasterization = {};
  49290. VULKAN_HPP_NAMESPACE::Bool32 degenerateTrianglesRasterized = {};
  49291. VULKAN_HPP_NAMESPACE::Bool32 degenerateLinesRasterized = {};
  49292. VULKAN_HPP_NAMESPACE::Bool32 fullyCoveredFragmentShaderInputVariable = {};
  49293. VULKAN_HPP_NAMESPACE::Bool32 conservativeRasterizationPostDepthCoverage = {};
  49294. };
  49295. static_assert( sizeof( PhysicalDeviceConservativeRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
  49296. static_assert( std::is_standard_layout<PhysicalDeviceConservativeRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  49297. template <>
  49298. struct CppType<StructureType, StructureType::ePhysicalDeviceConservativeRasterizationPropertiesEXT>
  49299. {
  49300. using Type = PhysicalDeviceConservativeRasterizationPropertiesEXT;
  49301. };
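// Illustrative sketch (comments only): the extraPrimitiveOverestimationSize passed to
// vk::PipelineRasterizationConservativeStateCreateInfoEXT must stay within the limits reported
// here. `conservativeProps` is an assumed, already-queried properties struct and `requested` a
// hypothetical application value.
//
//   vk::PipelineRasterizationConservativeStateCreateInfoEXT conservativeState;
//   conservativeState.conservativeRasterizationMode    = vk::ConservativeRasterizationModeEXT::eOverestimate;
//   conservativeState.extraPrimitiveOverestimationSize =
//     std::min( requested, conservativeProps.maxExtraPrimitiveOverestimationSize );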
  49302. struct PhysicalDeviceCooperativeMatrixFeaturesNV
  49303. {
  49304. static const bool allowDuplicate = false;
  49305. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
  49306. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49307. VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ = {}, VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ = {}) VULKAN_HPP_NOEXCEPT
  49308. : cooperativeMatrix( cooperativeMatrix_ ), cooperativeMatrixRobustBufferAccess( cooperativeMatrixRobustBufferAccess_ )
  49309. {}
  49310. VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixFeaturesNV( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49311. PhysicalDeviceCooperativeMatrixFeaturesNV( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  49312. : PhysicalDeviceCooperativeMatrixFeaturesNV( *reinterpret_cast<PhysicalDeviceCooperativeMatrixFeaturesNV const *>( &rhs ) )
  49313. {}
  49314. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49315. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( PhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49316. PhysicalDeviceCooperativeMatrixFeaturesNV & operator=( VkPhysicalDeviceCooperativeMatrixFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  49317. {
  49318. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV const *>( &rhs );
  49319. return *this;
  49320. }
  49321. PhysicalDeviceCooperativeMatrixFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  49322. {
  49323. pNext = pNext_;
  49324. return *this;
  49325. }
  49326. PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrix( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix_ ) VULKAN_HPP_NOEXCEPT
  49327. {
  49328. cooperativeMatrix = cooperativeMatrix_;
  49329. return *this;
  49330. }
  49331. PhysicalDeviceCooperativeMatrixFeaturesNV & setCooperativeMatrixRobustBufferAccess( VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess_ ) VULKAN_HPP_NOEXCEPT
  49332. {
  49333. cooperativeMatrixRobustBufferAccess = cooperativeMatrixRobustBufferAccess_;
  49334. return *this;
  49335. }
  49336. operator VkPhysicalDeviceCooperativeMatrixFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
  49337. {
  49338. return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
  49339. }
  49340. operator VkPhysicalDeviceCooperativeMatrixFeaturesNV &() VULKAN_HPP_NOEXCEPT
  49341. {
  49342. return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixFeaturesNV*>( this );
  49343. }
  49344. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  49345. auto operator<=>( PhysicalDeviceCooperativeMatrixFeaturesNV const& ) const = default;
  49346. #else
  49347. bool operator==( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  49348. {
  49349. return ( sType == rhs.sType )
  49350. && ( pNext == rhs.pNext )
  49351. && ( cooperativeMatrix == rhs.cooperativeMatrix )
  49352. && ( cooperativeMatrixRobustBufferAccess == rhs.cooperativeMatrixRobustBufferAccess );
  49353. }
  49354. bool operator!=( PhysicalDeviceCooperativeMatrixFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  49355. {
  49356. return !operator==( rhs );
  49357. }
  49358. #endif
  49359. public:
  49360. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV;
  49361. void* pNext = {};
  49362. VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrix = {};
  49363. VULKAN_HPP_NAMESPACE::Bool32 cooperativeMatrixRobustBufferAccess = {};
  49364. };
  49365. static_assert( sizeof( PhysicalDeviceCooperativeMatrixFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ), "struct and wrapper have different size!" );
  49366. static_assert( std::is_standard_layout<PhysicalDeviceCooperativeMatrixFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  49367. template <>
  49368. struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixFeaturesNV>
  49369. {
  49370. using Type = PhysicalDeviceCooperativeMatrixFeaturesNV;
  49371. };
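// Illustrative sketch (comments only): when cooperativeMatrix is supported, the concrete matrix
// shapes and component types can be enumerated through the VK_NV_cooperative_matrix query below.
// `physicalDevice` is an assumed vk::PhysicalDevice with the extension's entry points available.
//
//   std::vector<vk::CooperativeMatrixPropertiesNV> shapes =
//     physicalDevice.getCooperativeMatrixPropertiesNV();
//   for ( auto const & s : shapes ) { /* inspect s.MSize, s.NSize, s.KSize and component types */ }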
  49372. struct PhysicalDeviceCooperativeMatrixPropertiesNV
  49373. {
  49374. static const bool allowDuplicate = false;
  49375. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
  49376. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49377. VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV(VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages_ = {}) VULKAN_HPP_NOEXCEPT
  49378. : cooperativeMatrixSupportedStages( cooperativeMatrixSupportedStages_ )
  49379. {}
  49380. VULKAN_HPP_CONSTEXPR PhysicalDeviceCooperativeMatrixPropertiesNV( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49381. PhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  49382. : PhysicalDeviceCooperativeMatrixPropertiesNV( *reinterpret_cast<PhysicalDeviceCooperativeMatrixPropertiesNV const *>( &rhs ) )
  49383. {}
  49384. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49385. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( PhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49386. PhysicalDeviceCooperativeMatrixPropertiesNV & operator=( VkPhysicalDeviceCooperativeMatrixPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  49387. {
  49388. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV const *>( &rhs );
  49389. return *this;
  49390. }
  49391. operator VkPhysicalDeviceCooperativeMatrixPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
  49392. {
  49393. return *reinterpret_cast<const VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
  49394. }
  49395. operator VkPhysicalDeviceCooperativeMatrixPropertiesNV &() VULKAN_HPP_NOEXCEPT
  49396. {
  49397. return *reinterpret_cast<VkPhysicalDeviceCooperativeMatrixPropertiesNV*>( this );
  49398. }
  49399. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  49400. auto operator<=>( PhysicalDeviceCooperativeMatrixPropertiesNV const& ) const = default;
  49401. #else
  49402. bool operator==( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  49403. {
  49404. return ( sType == rhs.sType )
  49405. && ( pNext == rhs.pNext )
  49406. && ( cooperativeMatrixSupportedStages == rhs.cooperativeMatrixSupportedStages );
  49407. }
  49408. bool operator!=( PhysicalDeviceCooperativeMatrixPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  49409. {
  49410. return !operator==( rhs );
  49411. }
  49412. #endif
  49413. public:
  49414. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV;
  49415. void* pNext = {};
  49416. VULKAN_HPP_NAMESPACE::ShaderStageFlags cooperativeMatrixSupportedStages = {};
  49417. };
  49418. static_assert( sizeof( PhysicalDeviceCooperativeMatrixPropertiesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ), "struct and wrapper have different size!" );
  49419. static_assert( std::is_standard_layout<PhysicalDeviceCooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
  49420. template <>
  49421. struct CppType<StructureType, StructureType::ePhysicalDeviceCooperativeMatrixPropertiesNV>
  49422. {
  49423. using Type = PhysicalDeviceCooperativeMatrixPropertiesNV;
  49424. };
  49425. struct PhysicalDeviceCornerSampledImageFeaturesNV
  49426. {
  49427. static const bool allowDuplicate = false;
  49428. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
  49429. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49430. VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ = {}) VULKAN_HPP_NOEXCEPT
  49431. : cornerSampledImage( cornerSampledImage_ )
  49432. {}
  49433. VULKAN_HPP_CONSTEXPR PhysicalDeviceCornerSampledImageFeaturesNV( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49434. PhysicalDeviceCornerSampledImageFeaturesNV( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  49435. : PhysicalDeviceCornerSampledImageFeaturesNV( *reinterpret_cast<PhysicalDeviceCornerSampledImageFeaturesNV const *>( &rhs ) )
  49436. {}
  49437. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49438. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCornerSampledImageFeaturesNV & operator=( PhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49439. PhysicalDeviceCornerSampledImageFeaturesNV & operator=( VkPhysicalDeviceCornerSampledImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  49440. {
  49441. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV const *>( &rhs );
  49442. return *this;
  49443. }
  49444. PhysicalDeviceCornerSampledImageFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  49445. {
  49446. pNext = pNext_;
  49447. return *this;
  49448. }
  49449. PhysicalDeviceCornerSampledImageFeaturesNV & setCornerSampledImage( VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage_ ) VULKAN_HPP_NOEXCEPT
  49450. {
  49451. cornerSampledImage = cornerSampledImage_;
  49452. return *this;
  49453. }
  49454. operator VkPhysicalDeviceCornerSampledImageFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
  49455. {
  49456. return *reinterpret_cast<const VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
  49457. }
  49458. operator VkPhysicalDeviceCornerSampledImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
  49459. {
  49460. return *reinterpret_cast<VkPhysicalDeviceCornerSampledImageFeaturesNV*>( this );
  49461. }
  49462. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  49463. auto operator<=>( PhysicalDeviceCornerSampledImageFeaturesNV const& ) const = default;
  49464. #else
  49465. bool operator==( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  49466. {
  49467. return ( sType == rhs.sType )
  49468. && ( pNext == rhs.pNext )
  49469. && ( cornerSampledImage == rhs.cornerSampledImage );
  49470. }
  49471. bool operator!=( PhysicalDeviceCornerSampledImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  49472. {
  49473. return !operator==( rhs );
  49474. }
  49475. #endif
  49476. public:
  49477. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV;
  49478. void* pNext = {};
  49479. VULKAN_HPP_NAMESPACE::Bool32 cornerSampledImage = {};
  49480. };
  49481. static_assert( sizeof( PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ), "struct and wrapper have different size!" );
  49482. static_assert( std::is_standard_layout<PhysicalDeviceCornerSampledImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  49483. template <>
  49484. struct CppType<StructureType, StructureType::ePhysicalDeviceCornerSampledImageFeaturesNV>
  49485. {
  49486. using Type = PhysicalDeviceCornerSampledImageFeaturesNV;
  49487. };
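// Usage sketch (not part of the generated header): feature structures like this one are queried
// by chaining them into PhysicalDeviceFeatures2; the implementation sets each Bool32 member.
// Assumes the default "vk" namespace alias and a valid vk::PhysicalDevice named physicalDevice.
//
//   vk::PhysicalDeviceCornerSampledImageFeaturesNV cornerSampledFeatures;
//   vk::PhysicalDeviceFeatures2 features2;
//   features2.pNext = &cornerSampledFeatures;
//   physicalDevice.getFeatures2( &features2 );
//   bool supported = ( cornerSampledFeatures.cornerSampledImage == VK_TRUE );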
  49488. struct PhysicalDeviceCoverageReductionModeFeaturesNV
  49489. {
  49490. static const bool allowDuplicate = false;
  49491. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
  49492. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49493. VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ = {}) VULKAN_HPP_NOEXCEPT
  49494. : coverageReductionMode( coverageReductionMode_ )
  49495. {}
  49496. VULKAN_HPP_CONSTEXPR PhysicalDeviceCoverageReductionModeFeaturesNV( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49497. PhysicalDeviceCoverageReductionModeFeaturesNV( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  49498. : PhysicalDeviceCoverageReductionModeFeaturesNV( *reinterpret_cast<PhysicalDeviceCoverageReductionModeFeaturesNV const *>( &rhs ) )
  49499. {}
  49500. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49501. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( PhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49502. PhysicalDeviceCoverageReductionModeFeaturesNV & operator=( VkPhysicalDeviceCoverageReductionModeFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  49503. {
  49504. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV const *>( &rhs );
  49505. return *this;
  49506. }
  49507. PhysicalDeviceCoverageReductionModeFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  49508. {
  49509. pNext = pNext_;
  49510. return *this;
  49511. }
  49512. PhysicalDeviceCoverageReductionModeFeaturesNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
  49513. {
  49514. coverageReductionMode = coverageReductionMode_;
  49515. return *this;
  49516. }
  49517. operator VkPhysicalDeviceCoverageReductionModeFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
  49518. {
  49519. return *reinterpret_cast<const VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
  49520. }
  49521. operator VkPhysicalDeviceCoverageReductionModeFeaturesNV &() VULKAN_HPP_NOEXCEPT
  49522. {
  49523. return *reinterpret_cast<VkPhysicalDeviceCoverageReductionModeFeaturesNV*>( this );
  49524. }
  49525. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  49526. auto operator<=>( PhysicalDeviceCoverageReductionModeFeaturesNV const& ) const = default;
  49527. #else
  49528. bool operator==( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  49529. {
  49530. return ( sType == rhs.sType )
  49531. && ( pNext == rhs.pNext )
  49532. && ( coverageReductionMode == rhs.coverageReductionMode );
  49533. }
  49534. bool operator!=( PhysicalDeviceCoverageReductionModeFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  49535. {
  49536. return !operator==( rhs );
  49537. }
  49538. #endif
  49539. public:
  49540. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV;
  49541. void* pNext = {};
  49542. VULKAN_HPP_NAMESPACE::Bool32 coverageReductionMode = {};
  49543. };
  49544. static_assert( sizeof( PhysicalDeviceCoverageReductionModeFeaturesNV ) == sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ), "struct and wrapper have different size!" );
  49545. static_assert( std::is_standard_layout<PhysicalDeviceCoverageReductionModeFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  49546. template <>
  49547. struct CppType<StructureType, StructureType::ePhysicalDeviceCoverageReductionModeFeaturesNV>
  49548. {
  49549. using Type = PhysicalDeviceCoverageReductionModeFeaturesNV;
  49550. };
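// Usage sketch (not part of the generated header): to enable a queried feature, the same
// structure is chained into DeviceCreateInfo::pNext at device creation time. Assumes the
// VK_NV_coverage_reduction_mode extension is also listed in ppEnabledExtensionNames.
//
//   vk::PhysicalDeviceCoverageReductionModeFeaturesNV coverageReduction;
//   coverageReduction.coverageReductionMode = VK_TRUE;
//   vk::DeviceCreateInfo deviceCreateInfo;
//   deviceCreateInfo.pNext = &coverageReduction;
//   // ...fill in queue create infos and enabled extensions, then create the device.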
  49551. struct PhysicalDeviceCustomBorderColorFeaturesEXT
  49552. {
  49553. static const bool allowDuplicate = false;
  49554. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
  49555. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49556. VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ = {}) VULKAN_HPP_NOEXCEPT
  49557. : customBorderColors( customBorderColors_ ), customBorderColorWithoutFormat( customBorderColorWithoutFormat_ )
  49558. {}
  49559. VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorFeaturesEXT( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49560. PhysicalDeviceCustomBorderColorFeaturesEXT( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  49561. : PhysicalDeviceCustomBorderColorFeaturesEXT( *reinterpret_cast<PhysicalDeviceCustomBorderColorFeaturesEXT const *>( &rhs ) )
  49562. {}
  49563. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49564. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( PhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49565. PhysicalDeviceCustomBorderColorFeaturesEXT & operator=( VkPhysicalDeviceCustomBorderColorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  49566. {
  49567. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT const *>( &rhs );
  49568. return *this;
  49569. }
  49570. PhysicalDeviceCustomBorderColorFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  49571. {
  49572. pNext = pNext_;
  49573. return *this;
  49574. }
  49575. PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColors( VULKAN_HPP_NAMESPACE::Bool32 customBorderColors_ ) VULKAN_HPP_NOEXCEPT
  49576. {
  49577. customBorderColors = customBorderColors_;
  49578. return *this;
  49579. }
  49580. PhysicalDeviceCustomBorderColorFeaturesEXT & setCustomBorderColorWithoutFormat( VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat_ ) VULKAN_HPP_NOEXCEPT
  49581. {
  49582. customBorderColorWithoutFormat = customBorderColorWithoutFormat_;
  49583. return *this;
  49584. }
  49585. operator VkPhysicalDeviceCustomBorderColorFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  49586. {
  49587. return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorFeaturesEXT*>( this );
  49588. }
  49589. operator VkPhysicalDeviceCustomBorderColorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
  49590. {
  49591. return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorFeaturesEXT*>( this );
  49592. }
  49593. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  49594. auto operator<=>( PhysicalDeviceCustomBorderColorFeaturesEXT const& ) const = default;
  49595. #else
  49596. bool operator==( PhysicalDeviceCustomBorderColorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  49597. {
  49598. return ( sType == rhs.sType )
  49599. && ( pNext == rhs.pNext )
  49600. && ( customBorderColors == rhs.customBorderColors )
  49601. && ( customBorderColorWithoutFormat == rhs.customBorderColorWithoutFormat );
  49602. }
  49603. bool operator!=( PhysicalDeviceCustomBorderColorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  49604. {
  49605. return !operator==( rhs );
  49606. }
  49607. #endif
  49608. public:
  49609. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT;
  49610. void* pNext = {};
  49611. VULKAN_HPP_NAMESPACE::Bool32 customBorderColors = {};
  49612. VULKAN_HPP_NAMESPACE::Bool32 customBorderColorWithoutFormat = {};
  49613. };
  49614. static_assert( sizeof( PhysicalDeviceCustomBorderColorFeaturesEXT ) == sizeof( VkPhysicalDeviceCustomBorderColorFeaturesEXT ), "struct and wrapper have different size!" );
  49615. static_assert( std::is_standard_layout<PhysicalDeviceCustomBorderColorFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  49616. template <>
  49617. struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorFeaturesEXT>
  49618. {
  49619. using Type = PhysicalDeviceCustomBorderColorFeaturesEXT;
  49620. };
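// Usage sketch (not part of the generated header): the setXxx members return *this, so a feature
// structure can be configured fluently before it is chained into a create-info structure.
//
//   auto customBorderColorFeatures =
//     vk::PhysicalDeviceCustomBorderColorFeaturesEXT()
//       .setCustomBorderColors( VK_TRUE )
//       .setCustomBorderColorWithoutFormat( VK_TRUE );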
  49621. struct PhysicalDeviceCustomBorderColorPropertiesEXT
  49622. {
  49623. static const bool allowDuplicate = false;
  49624. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
  49625. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49626. VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT(uint32_t maxCustomBorderColorSamplers_ = {}) VULKAN_HPP_NOEXCEPT
  49627. : maxCustomBorderColorSamplers( maxCustomBorderColorSamplers_ )
  49628. {}
  49629. VULKAN_HPP_CONSTEXPR PhysicalDeviceCustomBorderColorPropertiesEXT( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49630. PhysicalDeviceCustomBorderColorPropertiesEXT( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  49631. : PhysicalDeviceCustomBorderColorPropertiesEXT( *reinterpret_cast<PhysicalDeviceCustomBorderColorPropertiesEXT const *>( &rhs ) )
  49632. {}
  49633. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49634. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( PhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49635. PhysicalDeviceCustomBorderColorPropertiesEXT & operator=( VkPhysicalDeviceCustomBorderColorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  49636. {
  49637. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT const *>( &rhs );
  49638. return *this;
  49639. }
  49640. operator VkPhysicalDeviceCustomBorderColorPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
  49641. {
  49642. return *reinterpret_cast<const VkPhysicalDeviceCustomBorderColorPropertiesEXT*>( this );
  49643. }
  49644. operator VkPhysicalDeviceCustomBorderColorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
  49645. {
  49646. return *reinterpret_cast<VkPhysicalDeviceCustomBorderColorPropertiesEXT*>( this );
  49647. }
  49648. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  49649. auto operator<=>( PhysicalDeviceCustomBorderColorPropertiesEXT const& ) const = default;
  49650. #else
  49651. bool operator==( PhysicalDeviceCustomBorderColorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  49652. {
  49653. return ( sType == rhs.sType )
  49654. && ( pNext == rhs.pNext )
  49655. && ( maxCustomBorderColorSamplers == rhs.maxCustomBorderColorSamplers );
  49656. }
  49657. bool operator!=( PhysicalDeviceCustomBorderColorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  49658. {
  49659. return !operator==( rhs );
  49660. }
  49661. #endif
  49662. public:
  49663. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT;
  49664. void* pNext = {};
  49665. uint32_t maxCustomBorderColorSamplers = {};
  49666. };
  49667. static_assert( sizeof( PhysicalDeviceCustomBorderColorPropertiesEXT ) == sizeof( VkPhysicalDeviceCustomBorderColorPropertiesEXT ), "struct and wrapper have different size!" );
  49668. static_assert( std::is_standard_layout<PhysicalDeviceCustomBorderColorPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  49669. template <>
  49670. struct CppType<StructureType, StructureType::ePhysicalDeviceCustomBorderColorPropertiesEXT>
  49671. {
  49672. using Type = PhysicalDeviceCustomBorderColorPropertiesEXT;
  49673. };
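// Usage sketch (not part of the generated header): instead of wiring pNext by hand, the
// StructureChain helpers can query several structures at once. Assumes a physical device that
// exposes VK_EXT_custom_border_color.
//
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceCustomBorderColorPropertiesEXT>();
//   uint32_t maxSamplers =
//     chain.get<vk::PhysicalDeviceCustomBorderColorPropertiesEXT>().maxCustomBorderColorSamplers;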
  49674. struct PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV
  49675. {
  49676. static const bool allowDuplicate = false;
  49677. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
  49678. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49679. VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ = {}) VULKAN_HPP_NOEXCEPT
  49680. : dedicatedAllocationImageAliasing( dedicatedAllocationImageAliasing_ )
  49681. {}
  49682. VULKAN_HPP_CONSTEXPR PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49683. PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  49684. : PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV( *reinterpret_cast<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>( &rhs ) )
  49685. {}
  49686. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49687. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49688. PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & operator=( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  49689. {
  49690. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const *>( &rhs );
  49691. return *this;
  49692. }
  49693. PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  49694. {
  49695. pNext = pNext_;
  49696. return *this;
  49697. }
  49698. PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV & setDedicatedAllocationImageAliasing( VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing_ ) VULKAN_HPP_NOEXCEPT
  49699. {
  49700. dedicatedAllocationImageAliasing = dedicatedAllocationImageAliasing_;
  49701. return *this;
  49702. }
  49703. operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
  49704. {
  49705. return *reinterpret_cast<const VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
  49706. }
  49707. operator VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV &() VULKAN_HPP_NOEXCEPT
  49708. {
  49709. return *reinterpret_cast<VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV*>( this );
  49710. }
  49711. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  49712. auto operator<=>( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& ) const = default;
  49713. #else
  49714. bool operator==( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  49715. {
  49716. return ( sType == rhs.sType )
  49717. && ( pNext == rhs.pNext )
  49718. && ( dedicatedAllocationImageAliasing == rhs.dedicatedAllocationImageAliasing );
  49719. }
  49720. bool operator!=( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  49721. {
  49722. return !operator==( rhs );
  49723. }
  49724. #endif
  49725. public:
  49726. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
  49727. void* pNext = {};
  49728. VULKAN_HPP_NAMESPACE::Bool32 dedicatedAllocationImageAliasing = {};
  49729. };
  49730. static_assert( sizeof( PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) == sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ), "struct and wrapper have different size!" );
  49731. static_assert( std::is_standard_layout<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  49732. template <>
  49733. struct CppType<StructureType, StructureType::ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>
  49734. {
  49735. using Type = PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
  49736. };
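// Usage sketch (not part of the generated header): the conversion operators, together with the
// size and standard-layout static_asserts above, let a wrapper be handed directly to C entry
// points expecting the corresponding Vk... type.
//
//   vk::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV aliasingFeatures;
//   VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV const & cFeatures = aliasingFeatures;
//   // cFeatures.sType is already set to
//   // VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV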
  49737. struct PhysicalDeviceDepthClipEnableFeaturesEXT
  49738. {
  49739. static const bool allowDuplicate = false;
  49740. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
  49741. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49742. VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}) VULKAN_HPP_NOEXCEPT
  49743. : depthClipEnable( depthClipEnable_ )
  49744. {}
  49745. VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthClipEnableFeaturesEXT( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49746. PhysicalDeviceDepthClipEnableFeaturesEXT( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  49747. : PhysicalDeviceDepthClipEnableFeaturesEXT( *reinterpret_cast<PhysicalDeviceDepthClipEnableFeaturesEXT const *>( &rhs ) )
  49748. {}
  49749. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49750. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( PhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49751. PhysicalDeviceDepthClipEnableFeaturesEXT & operator=( VkPhysicalDeviceDepthClipEnableFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  49752. {
  49753. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT const *>( &rhs );
  49754. return *this;
  49755. }
  49756. PhysicalDeviceDepthClipEnableFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  49757. {
  49758. pNext = pNext_;
  49759. return *this;
  49760. }
  49761. PhysicalDeviceDepthClipEnableFeaturesEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
  49762. {
  49763. depthClipEnable = depthClipEnable_;
  49764. return *this;
  49765. }
  49766. operator VkPhysicalDeviceDepthClipEnableFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  49767. {
  49768. return *reinterpret_cast<const VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
  49769. }
  49770. operator VkPhysicalDeviceDepthClipEnableFeaturesEXT &() VULKAN_HPP_NOEXCEPT
  49771. {
  49772. return *reinterpret_cast<VkPhysicalDeviceDepthClipEnableFeaturesEXT*>( this );
  49773. }
  49774. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  49775. auto operator<=>( PhysicalDeviceDepthClipEnableFeaturesEXT const& ) const = default;
  49776. #else
  49777. bool operator==( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  49778. {
  49779. return ( sType == rhs.sType )
  49780. && ( pNext == rhs.pNext )
  49781. && ( depthClipEnable == rhs.depthClipEnable );
  49782. }
  49783. bool operator!=( PhysicalDeviceDepthClipEnableFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  49784. {
  49785. return !operator==( rhs );
  49786. }
  49787. #endif
  49788. public:
  49789. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT;
  49790. void* pNext = {};
  49791. VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
  49792. };
  49793. static_assert( sizeof( PhysicalDeviceDepthClipEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ), "struct and wrapper have different size!" );
  49794. static_assert( std::is_standard_layout<PhysicalDeviceDepthClipEnableFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  49795. template <>
  49796. struct CppType<StructureType, StructureType::ePhysicalDeviceDepthClipEnableFeaturesEXT>
  49797. {
  49798. using Type = PhysicalDeviceDepthClipEnableFeaturesEXT;
  49799. };
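// Usage sketch (not part of the generated header): operator== compares every member, including
// pNext, so two instances match only if their chains and feature bits are identical.
//
//   vk::PhysicalDeviceDepthClipEnableFeaturesEXT a, b;
//   a.depthClipEnable = VK_TRUE;
//   bool same = ( a == b );   // false: depthClipEnable differs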
  49800. struct PhysicalDeviceDepthStencilResolveProperties
  49801. {
  49802. static const bool allowDuplicate = false;
  49803. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
  49804. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49805. VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties(VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}) VULKAN_HPP_NOEXCEPT
  49806. : supportedDepthResolveModes( supportedDepthResolveModes_ ), supportedStencilResolveModes( supportedStencilResolveModes_ ), independentResolveNone( independentResolveNone_ ), independentResolve( independentResolve_ )
  49807. {}
  49808. VULKAN_HPP_CONSTEXPR PhysicalDeviceDepthStencilResolveProperties( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49809. PhysicalDeviceDepthStencilResolveProperties( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  49810. : PhysicalDeviceDepthStencilResolveProperties( *reinterpret_cast<PhysicalDeviceDepthStencilResolveProperties const *>( &rhs ) )
  49811. {}
  49812. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49813. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDepthStencilResolveProperties & operator=( PhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49814. PhysicalDeviceDepthStencilResolveProperties & operator=( VkPhysicalDeviceDepthStencilResolveProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  49815. {
  49816. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties const *>( &rhs );
  49817. return *this;
  49818. }
  49819. operator VkPhysicalDeviceDepthStencilResolveProperties const&() const VULKAN_HPP_NOEXCEPT
  49820. {
  49821. return *reinterpret_cast<const VkPhysicalDeviceDepthStencilResolveProperties*>( this );
  49822. }
  49823. operator VkPhysicalDeviceDepthStencilResolveProperties &() VULKAN_HPP_NOEXCEPT
  49824. {
  49825. return *reinterpret_cast<VkPhysicalDeviceDepthStencilResolveProperties*>( this );
  49826. }
  49827. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  49828. auto operator<=>( PhysicalDeviceDepthStencilResolveProperties const& ) const = default;
  49829. #else
  49830. bool operator==( PhysicalDeviceDepthStencilResolveProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  49831. {
  49832. return ( sType == rhs.sType )
  49833. && ( pNext == rhs.pNext )
  49834. && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes )
  49835. && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes )
  49836. && ( independentResolveNone == rhs.independentResolveNone )
  49837. && ( independentResolve == rhs.independentResolve );
  49838. }
  49839. bool operator!=( PhysicalDeviceDepthStencilResolveProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  49840. {
  49841. return !operator==( rhs );
  49842. }
  49843. #endif
  49844. public:
  49845. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDepthStencilResolveProperties;
  49846. void* pNext = {};
  49847. VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
  49848. VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
  49849. VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
  49850. VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
  49851. };
  49852. static_assert( sizeof( PhysicalDeviceDepthStencilResolveProperties ) == sizeof( VkPhysicalDeviceDepthStencilResolveProperties ), "struct and wrapper have different size!" );
  49853. static_assert( std::is_standard_layout<PhysicalDeviceDepthStencilResolveProperties>::value, "struct wrapper is not a standard layout!" );
  49854. template <>
  49855. struct CppType<StructureType, StructureType::ePhysicalDeviceDepthStencilResolveProperties>
  49856. {
  49857. using Type = PhysicalDeviceDepthStencilResolveProperties;
  49858. };
  49859. using PhysicalDeviceDepthStencilResolvePropertiesKHR = PhysicalDeviceDepthStencilResolveProperties;
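// Usage sketch (not part of the generated header): the KHR alias above names the same type, so
// code written against VK_KHR_depth_stencil_resolve compiles unchanged against the Vulkan 1.2
// core struct.
//
//   vk::PhysicalDeviceDepthStencilResolvePropertiesKHR resolveProps;   // identical to the core type
//   vk::PhysicalDeviceProperties2 props2;
//   props2.pNext = &resolveProps;
//   physicalDevice.getProperties2( &props2 );
//   bool canResolveIndependently = ( resolveProps.independentResolve == VK_TRUE );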
  49860. struct PhysicalDeviceDescriptorIndexingFeatures
  49861. {
  49862. static const bool allowDuplicate = false;
  49863. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
  49864. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49865. VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}) VULKAN_HPP_NOEXCEPT
  49866. : shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ), shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ), shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ), shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ), shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ), shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ), shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ), shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ), shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ), shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ), descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ), descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ), descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ), descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ), descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ), descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ), descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ), descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ), descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ), runtimeDescriptorArray( runtimeDescriptorArray_ )
  49867. {}
  49868. VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingFeatures( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49869. PhysicalDeviceDescriptorIndexingFeatures( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  49870. : PhysicalDeviceDescriptorIndexingFeatures( *reinterpret_cast<PhysicalDeviceDescriptorIndexingFeatures const *>( &rhs ) )
  49871. {}
  49872. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  49873. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingFeatures & operator=( PhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  49874. PhysicalDeviceDescriptorIndexingFeatures & operator=( VkPhysicalDeviceDescriptorIndexingFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  49875. {
  49876. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures const *>( &rhs );
  49877. return *this;
  49878. }
  49879. PhysicalDeviceDescriptorIndexingFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  49880. {
  49881. pNext = pNext_;
  49882. return *this;
  49883. }
  49884. PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
  49885. {
  49886. shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
  49887. return *this;
  49888. }
  49889. PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
  49890. {
  49891. shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
  49892. return *this;
  49893. }
  49894. PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
  49895. {
  49896. shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
  49897. return *this;
  49898. }
  49899. PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
  49900. {
  49901. shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
  49902. return *this;
  49903. }
  49904. PhysicalDeviceDescriptorIndexingFeatures & setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
  49905. {
  49906. shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
  49907. return *this;
  49908. }
  49909. PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
  49910. {
  49911. shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
  49912. return *this;
  49913. }
  49914. PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
  49915. {
  49916. shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
  49917. return *this;
  49918. }
  49919. PhysicalDeviceDescriptorIndexingFeatures & setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
  49920. {
  49921. shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
  49922. return *this;
  49923. }
  49924. PhysicalDeviceDescriptorIndexingFeatures & setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
  49925. {
  49926. shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
  49927. return *this;
  49928. }
  49929. PhysicalDeviceDescriptorIndexingFeatures & setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
  49930. {
  49931. shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
  49932. return *this;
  49933. }
  49934. PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
  49935. {
  49936. descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
  49937. return *this;
  49938. }
  49939. PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
  49940. {
  49941. descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
  49942. return *this;
  49943. }
  49944. PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
  49945. {
  49946. descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
  49947. return *this;
  49948. }
  49949. PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
  49950. {
  49951. descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
  49952. return *this;
  49953. }
  49954. PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
  49955. {
  49956. descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
  49957. return *this;
  49958. }
  49959. PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
  49960. {
  49961. descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
  49962. return *this;
  49963. }
  49964. PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
  49965. {
  49966. descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
  49967. return *this;
  49968. }
  49969. PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
  49970. {
  49971. descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
  49972. return *this;
  49973. }
  49974. PhysicalDeviceDescriptorIndexingFeatures & setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
  49975. {
  49976. descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
  49977. return *this;
  49978. }
  49979. PhysicalDeviceDescriptorIndexingFeatures & setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
  49980. {
  49981. runtimeDescriptorArray = runtimeDescriptorArray_;
  49982. return *this;
  49983. }
  49984. operator VkPhysicalDeviceDescriptorIndexingFeatures const&() const VULKAN_HPP_NOEXCEPT
  49985. {
  49986. return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingFeatures*>( this );
  49987. }
  49988. operator VkPhysicalDeviceDescriptorIndexingFeatures &() VULKAN_HPP_NOEXCEPT
  49989. {
  49990. return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingFeatures*>( this );
  49991. }
  49992. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  49993. auto operator<=>( PhysicalDeviceDescriptorIndexingFeatures const& ) const = default;
  49994. #else
  49995. bool operator==( PhysicalDeviceDescriptorIndexingFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  49996. {
  49997. return ( sType == rhs.sType )
  49998. && ( pNext == rhs.pNext )
  49999. && ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing )
  50000. && ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing )
  50001. && ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing )
  50002. && ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing )
  50003. && ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing )
  50004. && ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing )
  50005. && ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing )
  50006. && ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing )
  50007. && ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing )
  50008. && ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing )
  50009. && ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind )
  50010. && ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind )
  50011. && ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind )
  50012. && ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind )
  50013. && ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind )
  50014. && ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind )
  50015. && ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending )
  50016. && ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound )
  50017. && ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount )
  50018. && ( runtimeDescriptorArray == rhs.runtimeDescriptorArray );
  50019. }
  50020. bool operator!=( PhysicalDeviceDescriptorIndexingFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  50021. {
  50022. return !operator==( rhs );
  50023. }
  50024. #endif
  50025. public:
  50026. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingFeatures;
  50027. void* pNext = {};
  50028. VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
  50029. VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
  50030. VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
  50031. VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
  50032. VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {};
  50033. VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
  50034. VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
  50035. VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
  50036. VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
  50037. VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
  50038. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
  50039. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
  50040. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
  50041. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
  50042. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
  50043. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
  50044. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
  50045. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
  50046. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
  50047. VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
  50048. };
  50049. static_assert( sizeof( PhysicalDeviceDescriptorIndexingFeatures ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeatures ), "struct and wrapper have different size!" );
  50050. static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingFeatures>::value, "struct wrapper is not a standard layout!" );
  50051. template <>
  50052. struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingFeatures>
  50053. {
  50054. using Type = PhysicalDeviceDescriptorIndexingFeatures;
  50055. };
  50056. using PhysicalDeviceDescriptorIndexingFeaturesEXT = PhysicalDeviceDescriptorIndexingFeatures;
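// Usage sketch (not part of the generated header): a typical descriptor-indexing setup turns on
// only the required members and chains the structure into DeviceCreateInfo. Assumes Vulkan 1.2
// or the VK_EXT_descriptor_indexing extension, and that the device reported these features.
//
//   vk::PhysicalDeviceDescriptorIndexingFeatures indexingFeatures;
//   indexingFeatures
//     .setRuntimeDescriptorArray( VK_TRUE )
//     .setDescriptorBindingPartiallyBound( VK_TRUE )
//     .setShaderSampledImageArrayNonUniformIndexing( VK_TRUE );
//   vk::DeviceCreateInfo deviceCreateInfo;
//   deviceCreateInfo.pNext = &indexingFeatures;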
  50057. struct PhysicalDeviceDescriptorIndexingProperties
  50058. {
  50059. static const bool allowDuplicate = false;
  50060. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;
  50061. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  50062. VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties(uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, uint32_t maxPerStageUpdateAfterBindResources_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}) VULKAN_HPP_NOEXCEPT
  50063. : maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ), shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ), shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ), shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ), shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ), shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ), robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ), quadDivergentImplicitLod( quadDivergentImplicitLod_ ), maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ), maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ), maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ), maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ), maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ), maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ), maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ), maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ), maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ), maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ), maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ), maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ), maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ )
  50064. {}
  50065. VULKAN_HPP_CONSTEXPR PhysicalDeviceDescriptorIndexingProperties( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  50066. PhysicalDeviceDescriptorIndexingProperties( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  50067. : PhysicalDeviceDescriptorIndexingProperties( *reinterpret_cast<PhysicalDeviceDescriptorIndexingProperties const *>( &rhs ) )
  50068. {}
  50069. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  50070. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDescriptorIndexingProperties & operator=( PhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  50071. PhysicalDeviceDescriptorIndexingProperties & operator=( VkPhysicalDeviceDescriptorIndexingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  50072. {
  50073. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties const *>( &rhs );
  50074. return *this;
  50075. }
  50076. operator VkPhysicalDeviceDescriptorIndexingProperties const&() const VULKAN_HPP_NOEXCEPT
  50077. {
  50078. return *reinterpret_cast<const VkPhysicalDeviceDescriptorIndexingProperties*>( this );
  50079. }
  50080. operator VkPhysicalDeviceDescriptorIndexingProperties &() VULKAN_HPP_NOEXCEPT
  50081. {
  50082. return *reinterpret_cast<VkPhysicalDeviceDescriptorIndexingProperties*>( this );
  50083. }
  50084. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  50085. auto operator<=>( PhysicalDeviceDescriptorIndexingProperties const& ) const = default;
  50086. #else
  50087. bool operator==( PhysicalDeviceDescriptorIndexingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  50088. {
  50089. return ( sType == rhs.sType )
  50090. && ( pNext == rhs.pNext )
  50091. && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools )
  50092. && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative )
  50093. && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative )
  50094. && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative )
  50095. && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative )
  50096. && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative )
  50097. && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind )
  50098. && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod )
  50099. && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers )
  50100. && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers )
  50101. && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers )
  50102. && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages )
  50103. && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages )
  50104. && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments )
  50105. && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources )
  50106. && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers )
  50107. && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers )
  50108. && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic )
  50109. && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers )
  50110. && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic )
  50111. && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages )
  50112. && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages )
  50113. && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments );
  50114. }
  50115. bool operator!=( PhysicalDeviceDescriptorIndexingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  50116. {
  50117. return !operator==( rhs );
  50118. }
  50119. #endif
  50120. public:
  50121. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDescriptorIndexingProperties;
  50122. void* pNext = {};
  50123. uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
  50124. VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
  50125. VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
  50126. VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
  50127. VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
  50128. VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
  50129. VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
  50130. VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
  50131. uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
  50132. uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
  50133. uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
  50134. uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
  50135. uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
  50136. uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
  50137. uint32_t maxPerStageUpdateAfterBindResources = {};
  50138. uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
  50139. uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
  50140. uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
  50141. uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
  50142. uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
  50143. uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
  50144. uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
  50145. uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
  50146. };
  50147. static_assert( sizeof( PhysicalDeviceDescriptorIndexingProperties ) == sizeof( VkPhysicalDeviceDescriptorIndexingProperties ), "struct and wrapper have different size!" );
  50148. static_assert( std::is_standard_layout<PhysicalDeviceDescriptorIndexingProperties>::value, "struct wrapper is not a standard layout!" );
  50149. template <>
  50150. struct CppType<StructureType, StructureType::ePhysicalDeviceDescriptorIndexingProperties>
  50151. {
  50152. using Type = PhysicalDeviceDescriptorIndexingProperties;
  50153. };
  50154. using PhysicalDeviceDescriptorIndexingPropertiesEXT = PhysicalDeviceDescriptorIndexingProperties;
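// Usage sketch (not part of the generated header): the corresponding limits are read back the
// same way and bound the descriptor counts usable with update-after-bind pools.
//
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceDescriptorIndexingProperties>();
//   auto const & indexingProps = chain.get<vk::PhysicalDeviceDescriptorIndexingProperties>();
//   uint32_t poolLimit = indexingProps.maxUpdateAfterBindDescriptorsInAllPools;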
struct PhysicalDeviceDeviceGeneratedCommandsFeaturesNV
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ = {}) VULKAN_HPP_NOEXCEPT
: deviceGeneratedCommands( deviceGeneratedCommands_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDeviceGeneratedCommandsFeaturesNV( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const *>( &rhs );
return *this;
}
PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceDeviceGeneratedCommandsFeaturesNV & setDeviceGeneratedCommands( VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands_ ) VULKAN_HPP_NOEXCEPT
{
deviceGeneratedCommands = deviceGeneratedCommands_;
return *this;
}
operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV*>( this );
}
operator VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& ) const = default;
#else
bool operator==( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceGeneratedCommands == rhs.deviceGeneratedCommands );
}
bool operator!=( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceGeneratedCommands = {};
};
static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV>
{
using Type = PhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
};
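// Usage sketch (illustrative only, not part of the generated header): querying the
// device-generated-commands feature through a StructureChain and re-using the result to enable
// it at device creation. Assumes the enhanced-mode wrappers with exceptions (the defaults),
// Vulkan 1.1+ for getFeatures2, and placeholder names `gpu` and `queueInfo`; the
// VK_NV_device_generated_commands device extension would also have to be requested (omitted
// here for brevity).
//
//   #include <vulkan/vulkan.hpp>
//
//   vk::Device makeDeviceWithDeviceGeneratedCommands( vk::PhysicalDevice gpu,
//                                                     vk::DeviceQueueCreateInfo const & queueInfo )
//   {
//     // Chain the NV feature struct behind PhysicalDeviceFeatures2 and query both at once.
//     auto chain = gpu.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                   vk::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>();
//     auto dgcFeatures = chain.get<vk::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>();
//
//     // Pass the queried struct back through DeviceCreateInfo::pNext to enable the feature.
//     vk::DeviceCreateInfo createInfo( {}, 1, &queueInfo );
//     createInfo.setPNext( &dgcFeatures );
//     return gpu.createDevice( createInfo );
//   }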
struct PhysicalDeviceDeviceGeneratedCommandsPropertiesNV
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV(uint32_t maxGraphicsShaderGroupCount_ = {}, uint32_t maxIndirectSequenceCount_ = {}, uint32_t maxIndirectCommandsTokenCount_ = {}, uint32_t maxIndirectCommandsStreamCount_ = {}, uint32_t maxIndirectCommandsTokenOffset_ = {}, uint32_t maxIndirectCommandsStreamStride_ = {}, uint32_t minSequencesCountBufferOffsetAlignment_ = {}, uint32_t minSequencesIndexBufferOffsetAlignment_ = {}, uint32_t minIndirectCommandsBufferOffsetAlignment_ = {}) VULKAN_HPP_NOEXCEPT
: maxGraphicsShaderGroupCount( maxGraphicsShaderGroupCount_ ), maxIndirectSequenceCount( maxIndirectSequenceCount_ ), maxIndirectCommandsTokenCount( maxIndirectCommandsTokenCount_ ), maxIndirectCommandsStreamCount( maxIndirectCommandsStreamCount_ ), maxIndirectCommandsTokenOffset( maxIndirectCommandsTokenOffset_ ), maxIndirectCommandsStreamStride( maxIndirectCommandsStreamStride_ ), minSequencesCountBufferOffsetAlignment( minSequencesCountBufferOffsetAlignment_ ), minSequencesIndexBufferOffsetAlignment( minSequencesIndexBufferOffsetAlignment_ ), minIndirectCommandsBufferOffsetAlignment( minIndirectCommandsBufferOffsetAlignment_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDeviceGeneratedCommandsPropertiesNV( *reinterpret_cast<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDeviceGeneratedCommandsPropertiesNV & operator=( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV*>( this );
}
operator VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const& ) const = default;
#else
bool operator==( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxGraphicsShaderGroupCount == rhs.maxGraphicsShaderGroupCount )
&& ( maxIndirectSequenceCount == rhs.maxIndirectSequenceCount )
&& ( maxIndirectCommandsTokenCount == rhs.maxIndirectCommandsTokenCount )
&& ( maxIndirectCommandsStreamCount == rhs.maxIndirectCommandsStreamCount )
&& ( maxIndirectCommandsTokenOffset == rhs.maxIndirectCommandsTokenOffset )
&& ( maxIndirectCommandsStreamStride == rhs.maxIndirectCommandsStreamStride )
&& ( minSequencesCountBufferOffsetAlignment == rhs.minSequencesCountBufferOffsetAlignment )
&& ( minSequencesIndexBufferOffsetAlignment == rhs.minSequencesIndexBufferOffsetAlignment )
&& ( minIndirectCommandsBufferOffsetAlignment == rhs.minIndirectCommandsBufferOffsetAlignment );
}
bool operator!=( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
void* pNext = {};
uint32_t maxGraphicsShaderGroupCount = {};
uint32_t maxIndirectSequenceCount = {};
uint32_t maxIndirectCommandsTokenCount = {};
uint32_t maxIndirectCommandsStreamCount = {};
uint32_t maxIndirectCommandsTokenOffset = {};
uint32_t maxIndirectCommandsStreamStride = {};
uint32_t minSequencesCountBufferOffsetAlignment = {};
uint32_t minSequencesIndexBufferOffsetAlignment = {};
uint32_t minIndirectCommandsBufferOffsetAlignment = {};
};
static_assert( sizeof( PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) == sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV>
{
using Type = PhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
};
struct PhysicalDeviceDeviceMemoryReportFeaturesEXT
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ = {}) VULKAN_HPP_NOEXCEPT
: deviceMemoryReport( deviceMemoryReport_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDeviceMemoryReportFeaturesEXT( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDeviceMemoryReportFeaturesEXT( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDeviceMemoryReportFeaturesEXT( *reinterpret_cast<PhysicalDeviceDeviceMemoryReportFeaturesEXT const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDeviceMemoryReportFeaturesEXT & operator=( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT const *>( &rhs );
return *this;
}
PhysicalDeviceDeviceMemoryReportFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceDeviceMemoryReportFeaturesEXT & setDeviceMemoryReport( VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport_ ) VULKAN_HPP_NOEXCEPT
{
deviceMemoryReport = deviceMemoryReport_;
return *this;
}
operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDeviceMemoryReportFeaturesEXT*>( this );
}
operator VkPhysicalDeviceDeviceMemoryReportFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDeviceMemoryReportFeaturesEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDeviceMemoryReportFeaturesEXT const& ) const = default;
#else
bool operator==( PhysicalDeviceDeviceMemoryReportFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceMemoryReport == rhs.deviceMemoryReport );
}
bool operator!=( PhysicalDeviceDeviceMemoryReportFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceMemoryReport = {};
};
static_assert( sizeof( PhysicalDeviceDeviceMemoryReportFeaturesEXT ) == sizeof( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceDeviceMemoryReportFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDeviceMemoryReportFeaturesEXT>
{
using Type = PhysicalDeviceDeviceMemoryReportFeaturesEXT;
};
struct PhysicalDeviceDiagnosticsConfigFeaturesNV
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ = {}) VULKAN_HPP_NOEXCEPT
: diagnosticsConfig( diagnosticsConfig_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDiagnosticsConfigFeaturesNV( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDiagnosticsConfigFeaturesNV( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDiagnosticsConfigFeaturesNV( *reinterpret_cast<PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( PhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDiagnosticsConfigFeaturesNV & operator=( VkPhysicalDeviceDiagnosticsConfigFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV const *>( &rhs );
return *this;
}
PhysicalDeviceDiagnosticsConfigFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceDiagnosticsConfigFeaturesNV & setDiagnosticsConfig( VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig_ ) VULKAN_HPP_NOEXCEPT
{
diagnosticsConfig = diagnosticsConfig_;
return *this;
}
operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDiagnosticsConfigFeaturesNV*>( this );
}
operator VkPhysicalDeviceDiagnosticsConfigFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDiagnosticsConfigFeaturesNV*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDiagnosticsConfigFeaturesNV const& ) const = default;
#else
bool operator==( PhysicalDeviceDiagnosticsConfigFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( diagnosticsConfig == rhs.diagnosticsConfig );
}
bool operator!=( PhysicalDeviceDiagnosticsConfigFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 diagnosticsConfig = {};
};
static_assert( sizeof( PhysicalDeviceDiagnosticsConfigFeaturesNV ) == sizeof( VkPhysicalDeviceDiagnosticsConfigFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceDiagnosticsConfigFeaturesNV>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDiagnosticsConfigFeaturesNV>
{
using Type = PhysicalDeviceDiagnosticsConfigFeaturesNV;
};
struct PhysicalDeviceDiscardRectanglePropertiesEXT
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT(uint32_t maxDiscardRectangles_ = {}) VULKAN_HPP_NOEXCEPT
: maxDiscardRectangles( maxDiscardRectangles_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceDiscardRectanglePropertiesEXT( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDiscardRectanglePropertiesEXT( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDiscardRectanglePropertiesEXT( *reinterpret_cast<PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( PhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDiscardRectanglePropertiesEXT & operator=( VkPhysicalDeviceDiscardRectanglePropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceDiscardRectanglePropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
}
operator VkPhysicalDeviceDiscardRectanglePropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDiscardRectanglePropertiesEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDiscardRectanglePropertiesEXT const& ) const = default;
#else
bool operator==( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxDiscardRectangles == rhs.maxDiscardRectangles );
}
bool operator!=( PhysicalDeviceDiscardRectanglePropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT;
void* pNext = {};
uint32_t maxDiscardRectangles = {};
};
static_assert( sizeof( PhysicalDeviceDiscardRectanglePropertiesEXT ) == sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceDiscardRectanglePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDiscardRectanglePropertiesEXT>
{
using Type = PhysicalDeviceDiscardRectanglePropertiesEXT;
};
struct PhysicalDeviceDriverProperties
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceDriverProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties(VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, std::array<char,VK_MAX_DRIVER_NAME_SIZE> const& driverName_ = {}, std::array<char,VK_MAX_DRIVER_INFO_SIZE> const& driverInfo_ = {}, VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}) VULKAN_HPP_NOEXCEPT
: driverID( driverID_ ), driverName( driverName_ ), driverInfo( driverInfo_ ), conformanceVersion( conformanceVersion_ )
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDriverProperties( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceDriverProperties( *reinterpret_cast<PhysicalDeviceDriverProperties const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceDriverProperties & operator=( PhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceDriverProperties & operator=( VkPhysicalDeviceDriverProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceDriverProperties const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceDriverProperties*>( this );
}
operator VkPhysicalDeviceDriverProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceDriverProperties*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceDriverProperties const& ) const = default;
#else
bool operator==( PhysicalDeviceDriverProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( driverID == rhs.driverID )
&& ( driverName == rhs.driverName )
&& ( driverInfo == rhs.driverInfo )
&& ( conformanceVersion == rhs.conformanceVersion );
}
bool operator!=( PhysicalDeviceDriverProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceDriverProperties;
void* pNext = {};
VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
};
static_assert( sizeof( PhysicalDeviceDriverProperties ) == sizeof( VkPhysicalDeviceDriverProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceDriverProperties>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceDriverProperties>
{
using Type = PhysicalDeviceDriverProperties;
};
using PhysicalDeviceDriverPropertiesKHR = PhysicalDeviceDriverProperties;
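// Usage sketch (illustrative only, not part of the generated header): reading the driver
// identification exposed through PhysicalDeviceDriverProperties (Vulkan 1.2, or
// VK_KHR_driver_properties via the KHR alias above). Assumes the enhanced-mode wrappers and a
// placeholder vk::PhysicalDevice named `gpu`.
//
//   #include <iostream>
//   #include <vulkan/vulkan.hpp>
//
//   void printDriverIdentification( vk::PhysicalDevice gpu )
//   {
//     auto chain = gpu.getProperties2<vk::PhysicalDeviceProperties2,
//                                     vk::PhysicalDeviceDriverProperties>();
//     auto const & driver = chain.get<vk::PhysicalDeviceDriverProperties>();
//     // driverName / driverInfo are null-terminated char arrays wrapped in ArrayWrapper1D.
//     std::cout << vk::to_string( driver.driverID ) << ": " << driver.driverName.data()
//               << " (" << driver.driverInfo.data() << "), conformance "
//               << static_cast<uint32_t>( driver.conformanceVersion.major ) << '.'
//               << static_cast<uint32_t>( driver.conformanceVersion.minor ) << '\n';
//   }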
struct PhysicalDeviceExclusiveScissorFeaturesNV
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ = {}) VULKAN_HPP_NOEXCEPT
: exclusiveScissor( exclusiveScissor_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExclusiveScissorFeaturesNV( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExclusiveScissorFeaturesNV( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExclusiveScissorFeaturesNV( *reinterpret_cast<PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExclusiveScissorFeaturesNV & operator=( PhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExclusiveScissorFeaturesNV & operator=( VkPhysicalDeviceExclusiveScissorFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV const *>( &rhs );
return *this;
}
PhysicalDeviceExclusiveScissorFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceExclusiveScissorFeaturesNV & setExclusiveScissor( VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor_ ) VULKAN_HPP_NOEXCEPT
{
exclusiveScissor = exclusiveScissor_;
return *this;
}
operator VkPhysicalDeviceExclusiveScissorFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
}
operator VkPhysicalDeviceExclusiveScissorFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExclusiveScissorFeaturesNV*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExclusiveScissorFeaturesNV const& ) const = default;
#else
bool operator==( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( exclusiveScissor == rhs.exclusiveScissor );
}
bool operator!=( PhysicalDeviceExclusiveScissorFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 exclusiveScissor = {};
};
static_assert( sizeof( PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceExclusiveScissorFeaturesNV>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExclusiveScissorFeaturesNV>
{
using Type = PhysicalDeviceExclusiveScissorFeaturesNV;
};
struct PhysicalDeviceExtendedDynamicStateFeaturesEXT
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ = {}) VULKAN_HPP_NOEXCEPT
: extendedDynamicState( extendedDynamicState_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExtendedDynamicStateFeaturesEXT( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExtendedDynamicStateFeaturesEXT( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExtendedDynamicStateFeaturesEXT( *reinterpret_cast<PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExtendedDynamicStateFeaturesEXT & operator=( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT const *>( &rhs );
return *this;
}
PhysicalDeviceExtendedDynamicStateFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceExtendedDynamicStateFeaturesEXT & setExtendedDynamicState( VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState_ ) VULKAN_HPP_NOEXCEPT
{
extendedDynamicState = extendedDynamicState_;
return *this;
}
operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExtendedDynamicStateFeaturesEXT*>( this );
}
operator VkPhysicalDeviceExtendedDynamicStateFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExtendedDynamicStateFeaturesEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExtendedDynamicStateFeaturesEXT const& ) const = default;
#else
bool operator==( PhysicalDeviceExtendedDynamicStateFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( extendedDynamicState == rhs.extendedDynamicState );
}
bool operator!=( PhysicalDeviceExtendedDynamicStateFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 extendedDynamicState = {};
};
static_assert( sizeof( PhysicalDeviceExtendedDynamicStateFeaturesEXT ) == sizeof( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceExtendedDynamicStateFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExtendedDynamicStateFeaturesEXT>
{
using Type = PhysicalDeviceExtendedDynamicStateFeaturesEXT;
};
struct PhysicalDeviceExternalImageFormatInfo
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo(VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd) VULKAN_HPP_NOEXCEPT
: handleType( handleType_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalImageFormatInfo( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalImageFormatInfo( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExternalImageFormatInfo( *reinterpret_cast<PhysicalDeviceExternalImageFormatInfo const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalImageFormatInfo & operator=( PhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalImageFormatInfo & operator=( VkPhysicalDeviceExternalImageFormatInfo const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo const *>( &rhs );
return *this;
}
PhysicalDeviceExternalImageFormatInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceExternalImageFormatInfo & setHandleType( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType_ ) VULKAN_HPP_NOEXCEPT
{
handleType = handleType_;
return *this;
}
operator VkPhysicalDeviceExternalImageFormatInfo const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExternalImageFormatInfo*>( this );
}
operator VkPhysicalDeviceExternalImageFormatInfo &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExternalImageFormatInfo*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExternalImageFormatInfo const& ) const = default;
#else
bool operator==( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( handleType == rhs.handleType );
}
bool operator!=( PhysicalDeviceExternalImageFormatInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalImageFormatInfo;
const void* pNext = {};
VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType = VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits::eOpaqueFd;
};
static_assert( sizeof( PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceExternalImageFormatInfo>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalImageFormatInfo>
{
using Type = PhysicalDeviceExternalImageFormatInfo;
};
using PhysicalDeviceExternalImageFormatInfoKHR = PhysicalDeviceExternalImageFormatInfo;
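// Usage sketch (illustrative only, not part of the generated header): chaining
// PhysicalDeviceExternalImageFormatInfo behind PhysicalDeviceImageFormatInfo2 to ask whether a
// sampled 2D RGBA8 image can be exported or imported as an opaque POSIX fd. Assumes the
// enhanced-mode wrappers with exceptions; getImageFormatProperties2 throws if the combination
// is unsupported. `gpu` is a placeholder vk::PhysicalDevice.
//
//   #include <vulkan/vulkan.hpp>
//
//   vk::ExternalMemoryProperties queryOpaqueFdImageSupport( vk::PhysicalDevice gpu )
//   {
//     vk::StructureChain<vk::PhysicalDeviceImageFormatInfo2, vk::PhysicalDeviceExternalImageFormatInfo> info(
//       vk::PhysicalDeviceImageFormatInfo2( vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D,
//                                           vk::ImageTiling::eOptimal, vk::ImageUsageFlagBits::eSampled ),
//       vk::PhysicalDeviceExternalImageFormatInfo( vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) );
//     auto props = gpu.getImageFormatProperties2<vk::ImageFormatProperties2,
//                                                vk::ExternalImageFormatProperties>(
//       info.get<vk::PhysicalDeviceImageFormatInfo2>() );
//     return props.get<vk::ExternalImageFormatProperties>().externalMemoryProperties;
//   }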
struct PhysicalDeviceExternalMemoryHostPropertiesEXT
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment_ = {}) VULKAN_HPP_NOEXCEPT
: minImportedHostPointerAlignment( minImportedHostPointerAlignment_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceExternalMemoryHostPropertiesEXT( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalMemoryHostPropertiesEXT( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceExternalMemoryHostPropertiesEXT( *reinterpret_cast<PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( PhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceExternalMemoryHostPropertiesEXT & operator=( VkPhysicalDeviceExternalMemoryHostPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
}
operator VkPhysicalDeviceExternalMemoryHostPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceExternalMemoryHostPropertiesEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceExternalMemoryHostPropertiesEXT const& ) const = default;
#else
bool operator==( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( minImportedHostPointerAlignment == rhs.minImportedHostPointerAlignment );
}
bool operator!=( PhysicalDeviceExternalMemoryHostPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize minImportedHostPointerAlignment = {};
};
static_assert( sizeof( PhysicalDeviceExternalMemoryHostPropertiesEXT ) == sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceExternalMemoryHostPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceExternalMemoryHostPropertiesEXT>
{
using Type = PhysicalDeviceExternalMemoryHostPropertiesEXT;
};
struct PhysicalDeviceFloatControlsProperties
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFloatControlsProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties(VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}) VULKAN_HPP_NOEXCEPT
: denormBehaviorIndependence( denormBehaviorIndependence_ ), roundingModeIndependence( roundingModeIndependence_ ), shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ), shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ), shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ), shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ), shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ), shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ), shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ), shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ), shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ), shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ), shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ), shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ), shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ), shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ), shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFloatControlsProperties( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFloatControlsProperties( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFloatControlsProperties( *reinterpret_cast<PhysicalDeviceFloatControlsProperties const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFloatControlsProperties & operator=( PhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFloatControlsProperties & operator=( VkPhysicalDeviceFloatControlsProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceFloatControlsProperties const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFloatControlsProperties*>( this );
}
operator VkPhysicalDeviceFloatControlsProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFloatControlsProperties*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFloatControlsProperties const& ) const = default;
#else
bool operator==( PhysicalDeviceFloatControlsProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( denormBehaviorIndependence == rhs.denormBehaviorIndependence )
&& ( roundingModeIndependence == rhs.roundingModeIndependence )
&& ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 )
&& ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 )
&& ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 )
&& ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 )
&& ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 )
&& ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 )
&& ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 )
&& ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 )
&& ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 )
&& ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 )
&& ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 )
&& ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 )
&& ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 )
&& ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 )
&& ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 );
}
bool operator!=( PhysicalDeviceFloatControlsProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFloatControlsProperties;
void* pNext = {};
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
};
static_assert( sizeof( PhysicalDeviceFloatControlsProperties ) == sizeof( VkPhysicalDeviceFloatControlsProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceFloatControlsProperties>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFloatControlsProperties>
{
using Type = PhysicalDeviceFloatControlsProperties;
};
using PhysicalDeviceFloatControlsPropertiesKHR = PhysicalDeviceFloatControlsProperties;
struct PhysicalDeviceFragmentDensityMap2FeaturesEXT
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ = {}) VULKAN_HPP_NOEXCEPT
: fragmentDensityMapDeferred( fragmentDensityMapDeferred_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2FeaturesEXT( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentDensityMap2FeaturesEXT( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFragmentDensityMap2FeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMap2FeaturesEXT const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentDensityMap2FeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT const *>( &rhs );
return *this;
}
PhysicalDeviceFragmentDensityMap2FeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceFragmentDensityMap2FeaturesEXT & setFragmentDensityMapDeferred( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityMapDeferred = fragmentDensityMapDeferred_;
return *this;
}
operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2FeaturesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMap2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2FeaturesEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFragmentDensityMap2FeaturesEXT const& ) const = default;
#else
bool operator==( PhysicalDeviceFragmentDensityMap2FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fragmentDensityMapDeferred == rhs.fragmentDensityMapDeferred );
}
bool operator!=( PhysicalDeviceFragmentDensityMap2FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDeferred = {};
};
static_assert( sizeof( PhysicalDeviceFragmentDensityMap2FeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMap2FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMap2FeaturesEXT>
{
using Type = PhysicalDeviceFragmentDensityMap2FeaturesEXT;
};
struct PhysicalDeviceFragmentDensityMap2PropertiesEXT
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT(VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess_ = {}, uint32_t maxSubsampledArrayLayers_ = {}, uint32_t maxDescriptorSetSubsampledSamplers_ = {}) VULKAN_HPP_NOEXCEPT
: subsampledLoads( subsampledLoads_ ), subsampledCoarseReconstructionEarlyAccess( subsampledCoarseReconstructionEarlyAccess_ ), maxSubsampledArrayLayers( maxSubsampledArrayLayers_ ), maxDescriptorSetSubsampledSamplers( maxDescriptorSetSubsampledSamplers_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMap2PropertiesEXT( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentDensityMap2PropertiesEXT( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFragmentDensityMap2PropertiesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMap2PropertiesEXT const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentDensityMap2PropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMap2PropertiesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMap2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMap2PropertiesEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFragmentDensityMap2PropertiesEXT const& ) const = default;
#else
bool operator==( PhysicalDeviceFragmentDensityMap2PropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( subsampledLoads == rhs.subsampledLoads )
&& ( subsampledCoarseReconstructionEarlyAccess == rhs.subsampledCoarseReconstructionEarlyAccess )
&& ( maxSubsampledArrayLayers == rhs.maxSubsampledArrayLayers )
&& ( maxDescriptorSetSubsampledSamplers == rhs.maxDescriptorSetSubsampledSamplers );
}
bool operator!=( PhysicalDeviceFragmentDensityMap2PropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 subsampledLoads = {};
VULKAN_HPP_NAMESPACE::Bool32 subsampledCoarseReconstructionEarlyAccess = {};
uint32_t maxSubsampledArrayLayers = {};
uint32_t maxDescriptorSetSubsampledSamplers = {};
};
static_assert( sizeof( PhysicalDeviceFragmentDensityMap2PropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMap2PropertiesEXT>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMap2PropertiesEXT>
{
using Type = PhysicalDeviceFragmentDensityMap2PropertiesEXT;
};
struct PhysicalDeviceFragmentDensityMapFeaturesEXT
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ = {}) VULKAN_HPP_NOEXCEPT
: fragmentDensityMap( fragmentDensityMap_ ), fragmentDensityMapDynamic( fragmentDensityMapDynamic_ ), fragmentDensityMapNonSubsampledImages( fragmentDensityMapNonSubsampledImages_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapFeaturesEXT( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentDensityMapFeaturesEXT( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceFragmentDensityMapFeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMapFeaturesEXT const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( PhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceFragmentDensityMapFeaturesEXT & operator=( VkPhysicalDeviceFragmentDensityMapFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT const *>( &rhs );
return *this;
}
PhysicalDeviceFragmentDensityMapFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMap( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityMap = fragmentDensityMap_;
return *this;
}
PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapDynamic( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityMapDynamic = fragmentDensityMapDynamic_;
return *this;
}
PhysicalDeviceFragmentDensityMapFeaturesEXT & setFragmentDensityMapNonSubsampledImages( VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages_ ) VULKAN_HPP_NOEXCEPT
{
fragmentDensityMapNonSubsampledImages = fragmentDensityMapNonSubsampledImages_;
return *this;
}
operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
}
operator VkPhysicalDeviceFragmentDensityMapFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapFeaturesEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceFragmentDensityMapFeaturesEXT const& ) const = default;
#else
bool operator==( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( fragmentDensityMap == rhs.fragmentDensityMap )
&& ( fragmentDensityMapDynamic == rhs.fragmentDensityMapDynamic )
&& ( fragmentDensityMapNonSubsampledImages == rhs.fragmentDensityMapNonSubsampledImages );
}
bool operator!=( PhysicalDeviceFragmentDensityMapFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMap = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapDynamic = {};
VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityMapNonSubsampledImages = {};
};
static_assert( sizeof( PhysicalDeviceFragmentDensityMapFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMapFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapFeaturesEXT>
{
using Type = PhysicalDeviceFragmentDensityMapFeaturesEXT;
};
  51054. struct PhysicalDeviceFragmentDensityMapPropertiesEXT
  51055. {
  51056. static const bool allowDuplicate = false;
  51057. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
  51058. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51059. VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT(VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations_ = {}) VULKAN_HPP_NOEXCEPT
  51060. : minFragmentDensityTexelSize( minFragmentDensityTexelSize_ ), maxFragmentDensityTexelSize( maxFragmentDensityTexelSize_ ), fragmentDensityInvocations( fragmentDensityInvocations_ )
  51061. {}
  51062. VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentDensityMapPropertiesEXT( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51063. PhysicalDeviceFragmentDensityMapPropertiesEXT( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  51064. : PhysicalDeviceFragmentDensityMapPropertiesEXT( *reinterpret_cast<PhysicalDeviceFragmentDensityMapPropertiesEXT const *>( &rhs ) )
  51065. {}
  51066. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51067. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( PhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51068. PhysicalDeviceFragmentDensityMapPropertiesEXT & operator=( VkPhysicalDeviceFragmentDensityMapPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  51069. {
  51070. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT const *>( &rhs );
  51071. return *this;
  51072. }
  51073. operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
  51074. {
  51075. return *reinterpret_cast<const VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
  51076. }
  51077. operator VkPhysicalDeviceFragmentDensityMapPropertiesEXT &() VULKAN_HPP_NOEXCEPT
  51078. {
  51079. return *reinterpret_cast<VkPhysicalDeviceFragmentDensityMapPropertiesEXT*>( this );
  51080. }
  51081. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  51082. auto operator<=>( PhysicalDeviceFragmentDensityMapPropertiesEXT const& ) const = default;
  51083. #else
  51084. bool operator==( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  51085. {
  51086. return ( sType == rhs.sType )
  51087. && ( pNext == rhs.pNext )
  51088. && ( minFragmentDensityTexelSize == rhs.minFragmentDensityTexelSize )
  51089. && ( maxFragmentDensityTexelSize == rhs.maxFragmentDensityTexelSize )
  51090. && ( fragmentDensityInvocations == rhs.fragmentDensityInvocations );
  51091. }
  51092. bool operator!=( PhysicalDeviceFragmentDensityMapPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  51093. {
  51094. return !operator==( rhs );
  51095. }
  51096. #endif
  51097. public:
  51098. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT;
  51099. void* pNext = {};
  51100. VULKAN_HPP_NAMESPACE::Extent2D minFragmentDensityTexelSize = {};
  51101. VULKAN_HPP_NAMESPACE::Extent2D maxFragmentDensityTexelSize = {};
  51102. VULKAN_HPP_NAMESPACE::Bool32 fragmentDensityInvocations = {};
  51103. };
  51104. static_assert( sizeof( PhysicalDeviceFragmentDensityMapPropertiesEXT ) == sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ), "struct and wrapper have different size!" );
  51105. static_assert( std::is_standard_layout<PhysicalDeviceFragmentDensityMapPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  51106. template <>
  51107. struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentDensityMapPropertiesEXT>
  51108. {
  51109. using Type = PhysicalDeviceFragmentDensityMapPropertiesEXT;
  51110. };
  51111. struct PhysicalDeviceFragmentShaderBarycentricFeaturesNV
  51112. {
  51113. static const bool allowDuplicate = false;
  51114. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV;
  51115. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51116. VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ = {}) VULKAN_HPP_NOEXCEPT
  51117. : fragmentShaderBarycentric( fragmentShaderBarycentric_ )
  51118. {}
  51119. VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderBarycentricFeaturesNV( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51120. PhysicalDeviceFragmentShaderBarycentricFeaturesNV( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  51121. : PhysicalDeviceFragmentShaderBarycentricFeaturesNV( *reinterpret_cast<PhysicalDeviceFragmentShaderBarycentricFeaturesNV const *>( &rhs ) )
  51122. {}
  51123. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51124. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderBarycentricFeaturesNV & operator=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51125. PhysicalDeviceFragmentShaderBarycentricFeaturesNV & operator=( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  51126. {
  51127. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV const *>( &rhs );
  51128. return *this;
  51129. }
  51130. PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  51131. {
  51132. pNext = pNext_;
  51133. return *this;
  51134. }
  51135. PhysicalDeviceFragmentShaderBarycentricFeaturesNV & setFragmentShaderBarycentric( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric_ ) VULKAN_HPP_NOEXCEPT
  51136. {
  51137. fragmentShaderBarycentric = fragmentShaderBarycentric_;
  51138. return *this;
  51139. }
  51140. operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
  51141. {
  51142. return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV*>( this );
  51143. }
  51144. operator VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV &() VULKAN_HPP_NOEXCEPT
  51145. {
  51146. return *reinterpret_cast<VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV*>( this );
  51147. }
  51148. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  51149. auto operator<=>( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& ) const = default;
  51150. #else
  51151. bool operator==( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  51152. {
  51153. return ( sType == rhs.sType )
  51154. && ( pNext == rhs.pNext )
  51155. && ( fragmentShaderBarycentric == rhs.fragmentShaderBarycentric );
  51156. }
  51157. bool operator!=( PhysicalDeviceFragmentShaderBarycentricFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  51158. {
  51159. return !operator==( rhs );
  51160. }
  51161. #endif
  51162. public:
  51163. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV;
  51164. void* pNext = {};
  51165. VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderBarycentric = {};
  51166. };
  51167. static_assert( sizeof( PhysicalDeviceFragmentShaderBarycentricFeaturesNV ) == sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV ), "struct and wrapper have different size!" );
  51168. static_assert( std::is_standard_layout<PhysicalDeviceFragmentShaderBarycentricFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  51169. template <>
  51170. struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderBarycentricFeaturesNV>
  51171. {
  51172. using Type = PhysicalDeviceFragmentShaderBarycentricFeaturesNV;
  51173. };
  51174. struct PhysicalDeviceFragmentShaderInterlockFeaturesEXT
  51175. {
  51176. static const bool allowDuplicate = false;
  51177. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
  51178. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51179. VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ = {}) VULKAN_HPP_NOEXCEPT
  51180. : fragmentShaderSampleInterlock( fragmentShaderSampleInterlock_ ), fragmentShaderPixelInterlock( fragmentShaderPixelInterlock_ ), fragmentShaderShadingRateInterlock( fragmentShaderShadingRateInterlock_ )
  51181. {}
  51182. VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShaderInterlockFeaturesEXT( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51183. PhysicalDeviceFragmentShaderInterlockFeaturesEXT( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  51184. : PhysicalDeviceFragmentShaderInterlockFeaturesEXT( *reinterpret_cast<PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>( &rhs ) )
  51185. {}
  51186. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51187. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51188. PhysicalDeviceFragmentShaderInterlockFeaturesEXT & operator=( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  51189. {
  51190. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT const *>( &rhs );
  51191. return *this;
  51192. }
  51193. PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  51194. {
  51195. pNext = pNext_;
  51196. return *this;
  51197. }
  51198. PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderSampleInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock_ ) VULKAN_HPP_NOEXCEPT
  51199. {
  51200. fragmentShaderSampleInterlock = fragmentShaderSampleInterlock_;
  51201. return *this;
  51202. }
  51203. PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderPixelInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock_ ) VULKAN_HPP_NOEXCEPT
  51204. {
  51205. fragmentShaderPixelInterlock = fragmentShaderPixelInterlock_;
  51206. return *this;
  51207. }
  51208. PhysicalDeviceFragmentShaderInterlockFeaturesEXT & setFragmentShaderShadingRateInterlock( VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock_ ) VULKAN_HPP_NOEXCEPT
  51209. {
  51210. fragmentShaderShadingRateInterlock = fragmentShaderShadingRateInterlock_;
  51211. return *this;
  51212. }
  51213. operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  51214. {
  51215. return *reinterpret_cast<const VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
  51216. }
  51217. operator VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT
  51218. {
  51219. return *reinterpret_cast<VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT*>( this );
  51220. }
  51221. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  51222. auto operator<=>( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& ) const = default;
  51223. #else
  51224. bool operator==( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  51225. {
  51226. return ( sType == rhs.sType )
  51227. && ( pNext == rhs.pNext )
  51228. && ( fragmentShaderSampleInterlock == rhs.fragmentShaderSampleInterlock )
  51229. && ( fragmentShaderPixelInterlock == rhs.fragmentShaderPixelInterlock )
  51230. && ( fragmentShaderShadingRateInterlock == rhs.fragmentShaderShadingRateInterlock );
  51231. }
  51232. bool operator!=( PhysicalDeviceFragmentShaderInterlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  51233. {
  51234. return !operator==( rhs );
  51235. }
  51236. #endif
  51237. public:
  51238. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT;
  51239. void* pNext = {};
  51240. VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderSampleInterlock = {};
  51241. VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderPixelInterlock = {};
  51242. VULKAN_HPP_NAMESPACE::Bool32 fragmentShaderShadingRateInterlock = {};
  51243. };
  51244. static_assert( sizeof( PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) == sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ), "struct and wrapper have different size!" );
  51245. static_assert( std::is_standard_layout<PhysicalDeviceFragmentShaderInterlockFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  51246. template <>
  51247. struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShaderInterlockFeaturesEXT>
  51248. {
  51249. using Type = PhysicalDeviceFragmentShaderInterlockFeaturesEXT;
  51250. };
  51251. struct PhysicalDeviceFragmentShadingRateEnumsFeaturesNV
  51252. {
  51253. static const bool allowDuplicate = false;
  51254. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
  51255. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51256. VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ = {}, VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ = {}, VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ = {}) VULKAN_HPP_NOEXCEPT
  51257. : fragmentShadingRateEnums( fragmentShadingRateEnums_ ), supersampleFragmentShadingRates( supersampleFragmentShadingRates_ ), noInvocationFragmentShadingRates( noInvocationFragmentShadingRates_ )
  51258. {}
  51259. VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51260. PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  51261. : PhysicalDeviceFragmentShadingRateEnumsFeaturesNV( *reinterpret_cast<PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const *>( &rhs ) )
  51262. {}
  51263. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51264. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51265. PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  51266. {
  51267. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const *>( &rhs );
  51268. return *this;
  51269. }
  51270. PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  51271. {
  51272. pNext = pNext_;
  51273. return *this;
  51274. }
  51275. PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setFragmentShadingRateEnums( VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums_ ) VULKAN_HPP_NOEXCEPT
  51276. {
  51277. fragmentShadingRateEnums = fragmentShadingRateEnums_;
  51278. return *this;
  51279. }
  51280. PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setSupersampleFragmentShadingRates( VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT
  51281. {
  51282. supersampleFragmentShadingRates = supersampleFragmentShadingRates_;
  51283. return *this;
  51284. }
  51285. PhysicalDeviceFragmentShadingRateEnumsFeaturesNV & setNoInvocationFragmentShadingRates( VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates_ ) VULKAN_HPP_NOEXCEPT
  51286. {
  51287. noInvocationFragmentShadingRates = noInvocationFragmentShadingRates_;
  51288. return *this;
  51289. }
  51290. operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
  51291. {
  51292. return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV*>( this );
  51293. }
  51294. operator VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV &() VULKAN_HPP_NOEXCEPT
  51295. {
  51296. return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV*>( this );
  51297. }
  51298. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  51299. auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const& ) const = default;
  51300. #else
  51301. bool operator==( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  51302. {
  51303. return ( sType == rhs.sType )
  51304. && ( pNext == rhs.pNext )
  51305. && ( fragmentShadingRateEnums == rhs.fragmentShadingRateEnums )
  51306. && ( supersampleFragmentShadingRates == rhs.supersampleFragmentShadingRates )
  51307. && ( noInvocationFragmentShadingRates == rhs.noInvocationFragmentShadingRates );
  51308. }
  51309. bool operator!=( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  51310. {
  51311. return !operator==( rhs );
  51312. }
  51313. #endif
  51314. public:
  51315. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
  51316. void* pNext = {};
  51317. VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateEnums = {};
  51318. VULKAN_HPP_NAMESPACE::Bool32 supersampleFragmentShadingRates = {};
  51319. VULKAN_HPP_NAMESPACE::Bool32 noInvocationFragmentShadingRates = {};
  51320. };
  51321. static_assert( sizeof( PhysicalDeviceFragmentShadingRateEnumsFeaturesNV ) == sizeof( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV ), "struct and wrapper have different size!" );
  51322. static_assert( std::is_standard_layout<PhysicalDeviceFragmentShadingRateEnumsFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  51323. template <>
  51324. struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV>
  51325. {
  51326. using Type = PhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
  51327. };
  51328. struct PhysicalDeviceFragmentShadingRateEnumsPropertiesNV
  51329. {
  51330. static const bool allowDuplicate = false;
  51331. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
  51332. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51333. VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV(VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1) VULKAN_HPP_NOEXCEPT
  51334. : maxFragmentShadingRateInvocationCount( maxFragmentShadingRateInvocationCount_ )
  51335. {}
  51336. VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51337. PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  51338. : PhysicalDeviceFragmentShadingRateEnumsPropertiesNV( *reinterpret_cast<PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *>( &rhs ) )
  51339. {}
  51340. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51341. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & operator=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51342. PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & operator=( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  51343. {
  51344. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const *>( &rhs );
  51345. return *this;
  51346. }
  51347. PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  51348. {
  51349. pNext = pNext_;
  51350. return *this;
  51351. }
  51352. PhysicalDeviceFragmentShadingRateEnumsPropertiesNV & setMaxFragmentShadingRateInvocationCount( VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount_ ) VULKAN_HPP_NOEXCEPT
  51353. {
  51354. maxFragmentShadingRateInvocationCount = maxFragmentShadingRateInvocationCount_;
  51355. return *this;
  51356. }
  51357. operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
  51358. {
  51359. return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV*>( this );
  51360. }
  51361. operator VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV &() VULKAN_HPP_NOEXCEPT
  51362. {
  51363. return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV*>( this );
  51364. }
  51365. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  51366. auto operator<=>( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const& ) const = default;
  51367. #else
  51368. bool operator==( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  51369. {
  51370. return ( sType == rhs.sType )
  51371. && ( pNext == rhs.pNext )
  51372. && ( maxFragmentShadingRateInvocationCount == rhs.maxFragmentShadingRateInvocationCount );
  51373. }
  51374. bool operator!=( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  51375. {
  51376. return !operator==( rhs );
  51377. }
  51378. #endif
  51379. public:
  51380. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
  51381. void* pNext = {};
  51382. VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateInvocationCount = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
  51383. };
  51384. static_assert( sizeof( PhysicalDeviceFragmentShadingRateEnumsPropertiesNV ) == sizeof( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV ), "struct and wrapper have different size!" );
  51385. static_assert( std::is_standard_layout<PhysicalDeviceFragmentShadingRateEnumsPropertiesNV>::value, "struct wrapper is not a standard layout!" );
  51386. template <>
  51387. struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV>
  51388. {
  51389. using Type = PhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
  51390. };
  51391. struct PhysicalDeviceFragmentShadingRateFeaturesKHR
  51392. {
  51393. static const bool allowDuplicate = false;
  51394. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;
  51395. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51396. VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ = {}, VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ = {}) VULKAN_HPP_NOEXCEPT
  51397. : pipelineFragmentShadingRate( pipelineFragmentShadingRate_ ), primitiveFragmentShadingRate( primitiveFragmentShadingRate_ ), attachmentFragmentShadingRate( attachmentFragmentShadingRate_ )
  51398. {}
  51399. VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRateFeaturesKHR( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51400. PhysicalDeviceFragmentShadingRateFeaturesKHR( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  51401. : PhysicalDeviceFragmentShadingRateFeaturesKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs ) )
  51402. {}
  51403. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51404. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( PhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51405. PhysicalDeviceFragmentShadingRateFeaturesKHR & operator=( VkPhysicalDeviceFragmentShadingRateFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  51406. {
  51407. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR const *>( &rhs );
  51408. return *this;
  51409. }
  51410. PhysicalDeviceFragmentShadingRateFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  51411. {
  51412. pNext = pNext_;
  51413. return *this;
  51414. }
  51415. PhysicalDeviceFragmentShadingRateFeaturesKHR & setPipelineFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
  51416. {
  51417. pipelineFragmentShadingRate = pipelineFragmentShadingRate_;
  51418. return *this;
  51419. }
  51420. PhysicalDeviceFragmentShadingRateFeaturesKHR & setPrimitiveFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
  51421. {
  51422. primitiveFragmentShadingRate = primitiveFragmentShadingRate_;
  51423. return *this;
  51424. }
  51425. PhysicalDeviceFragmentShadingRateFeaturesKHR & setAttachmentFragmentShadingRate( VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate_ ) VULKAN_HPP_NOEXCEPT
  51426. {
  51427. attachmentFragmentShadingRate = attachmentFragmentShadingRate_;
  51428. return *this;
  51429. }
  51430. operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
  51431. {
  51432. return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRateFeaturesKHR*>( this );
  51433. }
  51434. operator VkPhysicalDeviceFragmentShadingRateFeaturesKHR &() VULKAN_HPP_NOEXCEPT
  51435. {
  51436. return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRateFeaturesKHR*>( this );
  51437. }
  51438. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  51439. auto operator<=>( PhysicalDeviceFragmentShadingRateFeaturesKHR const& ) const = default;
  51440. #else
  51441. bool operator==( PhysicalDeviceFragmentShadingRateFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  51442. {
  51443. return ( sType == rhs.sType )
  51444. && ( pNext == rhs.pNext )
  51445. && ( pipelineFragmentShadingRate == rhs.pipelineFragmentShadingRate )
  51446. && ( primitiveFragmentShadingRate == rhs.primitiveFragmentShadingRate )
  51447. && ( attachmentFragmentShadingRate == rhs.attachmentFragmentShadingRate );
  51448. }
  51449. bool operator!=( PhysicalDeviceFragmentShadingRateFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  51450. {
  51451. return !operator==( rhs );
  51452. }
  51453. #endif
  51454. public:
  51455. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR;
  51456. void* pNext = {};
  51457. VULKAN_HPP_NAMESPACE::Bool32 pipelineFragmentShadingRate = {};
  51458. VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRate = {};
  51459. VULKAN_HPP_NAMESPACE::Bool32 attachmentFragmentShadingRate = {};
  51460. };
  51461. static_assert( sizeof( PhysicalDeviceFragmentShadingRateFeaturesKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRateFeaturesKHR ), "struct and wrapper have different size!" );
  51462. static_assert( std::is_standard_layout<PhysicalDeviceFragmentShadingRateFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
  51463. template <>
  51464. struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRateFeaturesKHR>
  51465. {
  51466. using Type = PhysicalDeviceFragmentShadingRateFeaturesKHR;
  51467. };
  51468. struct PhysicalDeviceFragmentShadingRatePropertiesKHR
  51469. {
  51470. static const bool allowDuplicate = false;
  51471. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;
  51472. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51473. VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR(VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize_ = {}, uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ = {}, VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports_ = {}, VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize_ = {}, uint32_t maxFragmentSizeAspectRatio_ = {}, uint32_t maxFragmentShadingRateCoverageSamples_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples_ = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner_ = {}) VULKAN_HPP_NOEXCEPT
  51474. : minFragmentShadingRateAttachmentTexelSize( minFragmentShadingRateAttachmentTexelSize_ ), maxFragmentShadingRateAttachmentTexelSize( maxFragmentShadingRateAttachmentTexelSize_ ), maxFragmentShadingRateAttachmentTexelSizeAspectRatio( maxFragmentShadingRateAttachmentTexelSizeAspectRatio_ ), primitiveFragmentShadingRateWithMultipleViewports( primitiveFragmentShadingRateWithMultipleViewports_ ), layeredShadingRateAttachments( layeredShadingRateAttachments_ ), fragmentShadingRateNonTrivialCombinerOps( fragmentShadingRateNonTrivialCombinerOps_ ), maxFragmentSize( maxFragmentSize_ ), maxFragmentSizeAspectRatio( maxFragmentSizeAspectRatio_ ), maxFragmentShadingRateCoverageSamples( maxFragmentShadingRateCoverageSamples_ ), maxFragmentShadingRateRasterizationSamples( maxFragmentShadingRateRasterizationSamples_ ), fragmentShadingRateWithShaderDepthStencilWrites( fragmentShadingRateWithShaderDepthStencilWrites_ ), fragmentShadingRateWithSampleMask( fragmentShadingRateWithSampleMask_ ), fragmentShadingRateWithShaderSampleMask( fragmentShadingRateWithShaderSampleMask_ ), fragmentShadingRateWithConservativeRasterization( fragmentShadingRateWithConservativeRasterization_ ), fragmentShadingRateWithFragmentShaderInterlock( fragmentShadingRateWithFragmentShaderInterlock_ ), fragmentShadingRateWithCustomSampleLocations( fragmentShadingRateWithCustomSampleLocations_ ), fragmentShadingRateStrictMultiplyCombiner( fragmentShadingRateStrictMultiplyCombiner_ )
  51475. {}
  51476. VULKAN_HPP_CONSTEXPR PhysicalDeviceFragmentShadingRatePropertiesKHR( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51477. PhysicalDeviceFragmentShadingRatePropertiesKHR( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  51478. : PhysicalDeviceFragmentShadingRatePropertiesKHR( *reinterpret_cast<PhysicalDeviceFragmentShadingRatePropertiesKHR const *>( &rhs ) )
  51479. {}
  51480. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51481. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( PhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51482. PhysicalDeviceFragmentShadingRatePropertiesKHR & operator=( VkPhysicalDeviceFragmentShadingRatePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  51483. {
  51484. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR const *>( &rhs );
  51485. return *this;
  51486. }
  51487. operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
  51488. {
  51489. return *reinterpret_cast<const VkPhysicalDeviceFragmentShadingRatePropertiesKHR*>( this );
  51490. }
  51491. operator VkPhysicalDeviceFragmentShadingRatePropertiesKHR &() VULKAN_HPP_NOEXCEPT
  51492. {
  51493. return *reinterpret_cast<VkPhysicalDeviceFragmentShadingRatePropertiesKHR*>( this );
  51494. }
  51495. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  51496. auto operator<=>( PhysicalDeviceFragmentShadingRatePropertiesKHR const& ) const = default;
  51497. #else
  51498. bool operator==( PhysicalDeviceFragmentShadingRatePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  51499. {
  51500. return ( sType == rhs.sType )
  51501. && ( pNext == rhs.pNext )
  51502. && ( minFragmentShadingRateAttachmentTexelSize == rhs.minFragmentShadingRateAttachmentTexelSize )
  51503. && ( maxFragmentShadingRateAttachmentTexelSize == rhs.maxFragmentShadingRateAttachmentTexelSize )
  51504. && ( maxFragmentShadingRateAttachmentTexelSizeAspectRatio == rhs.maxFragmentShadingRateAttachmentTexelSizeAspectRatio )
  51505. && ( primitiveFragmentShadingRateWithMultipleViewports == rhs.primitiveFragmentShadingRateWithMultipleViewports )
  51506. && ( layeredShadingRateAttachments == rhs.layeredShadingRateAttachments )
  51507. && ( fragmentShadingRateNonTrivialCombinerOps == rhs.fragmentShadingRateNonTrivialCombinerOps )
  51508. && ( maxFragmentSize == rhs.maxFragmentSize )
  51509. && ( maxFragmentSizeAspectRatio == rhs.maxFragmentSizeAspectRatio )
  51510. && ( maxFragmentShadingRateCoverageSamples == rhs.maxFragmentShadingRateCoverageSamples )
  51511. && ( maxFragmentShadingRateRasterizationSamples == rhs.maxFragmentShadingRateRasterizationSamples )
  51512. && ( fragmentShadingRateWithShaderDepthStencilWrites == rhs.fragmentShadingRateWithShaderDepthStencilWrites )
  51513. && ( fragmentShadingRateWithSampleMask == rhs.fragmentShadingRateWithSampleMask )
  51514. && ( fragmentShadingRateWithShaderSampleMask == rhs.fragmentShadingRateWithShaderSampleMask )
  51515. && ( fragmentShadingRateWithConservativeRasterization == rhs.fragmentShadingRateWithConservativeRasterization )
  51516. && ( fragmentShadingRateWithFragmentShaderInterlock == rhs.fragmentShadingRateWithFragmentShaderInterlock )
  51517. && ( fragmentShadingRateWithCustomSampleLocations == rhs.fragmentShadingRateWithCustomSampleLocations )
  51518. && ( fragmentShadingRateStrictMultiplyCombiner == rhs.fragmentShadingRateStrictMultiplyCombiner );
  51519. }
  51520. bool operator!=( PhysicalDeviceFragmentShadingRatePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  51521. {
  51522. return !operator==( rhs );
  51523. }
  51524. #endif
  51525. public:
  51526. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR;
  51527. void* pNext = {};
  51528. VULKAN_HPP_NAMESPACE::Extent2D minFragmentShadingRateAttachmentTexelSize = {};
  51529. VULKAN_HPP_NAMESPACE::Extent2D maxFragmentShadingRateAttachmentTexelSize = {};
  51530. uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio = {};
  51531. VULKAN_HPP_NAMESPACE::Bool32 primitiveFragmentShadingRateWithMultipleViewports = {};
  51532. VULKAN_HPP_NAMESPACE::Bool32 layeredShadingRateAttachments = {};
  51533. VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateNonTrivialCombinerOps = {};
  51534. VULKAN_HPP_NAMESPACE::Extent2D maxFragmentSize = {};
  51535. uint32_t maxFragmentSizeAspectRatio = {};
  51536. uint32_t maxFragmentShadingRateCoverageSamples = {};
  51537. VULKAN_HPP_NAMESPACE::SampleCountFlagBits maxFragmentShadingRateRasterizationSamples = VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1;
  51538. VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderDepthStencilWrites = {};
  51539. VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithSampleMask = {};
  51540. VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithShaderSampleMask = {};
  51541. VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithConservativeRasterization = {};
  51542. VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithFragmentShaderInterlock = {};
  51543. VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateWithCustomSampleLocations = {};
  51544. VULKAN_HPP_NAMESPACE::Bool32 fragmentShadingRateStrictMultiplyCombiner = {};
  51545. };
  51546. static_assert( sizeof( PhysicalDeviceFragmentShadingRatePropertiesKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRatePropertiesKHR ), "struct and wrapper have different size!" );
  51547. static_assert( std::is_standard_layout<PhysicalDeviceFragmentShadingRatePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
  51548. template <>
  51549. struct CppType<StructureType, StructureType::ePhysicalDeviceFragmentShadingRatePropertiesKHR>
  51550. {
  51551. using Type = PhysicalDeviceFragmentShadingRatePropertiesKHR;
  51552. };
  51553. struct PhysicalDeviceGroupProperties
  51554. {
  51555. static const bool allowDuplicate = false;
  51556. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceGroupProperties;
  51557. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51558. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties(uint32_t physicalDeviceCount_ = {}, std::array<VULKAN_HPP_NAMESPACE::PhysicalDevice,VK_MAX_DEVICE_GROUP_SIZE> const& physicalDevices_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation_ = {}) VULKAN_HPP_NOEXCEPT
  51559. : physicalDeviceCount( physicalDeviceCount_ ), physicalDevices( physicalDevices_ ), subsetAllocation( subsetAllocation_ )
  51560. {}
  51561. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51562. PhysicalDeviceGroupProperties( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  51563. : PhysicalDeviceGroupProperties( *reinterpret_cast<PhysicalDeviceGroupProperties const *>( &rhs ) )
  51564. {}
  51565. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51566. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceGroupProperties & operator=( PhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51567. PhysicalDeviceGroupProperties & operator=( VkPhysicalDeviceGroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  51568. {
  51569. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties const *>( &rhs );
  51570. return *this;
  51571. }
  51572. operator VkPhysicalDeviceGroupProperties const&() const VULKAN_HPP_NOEXCEPT
  51573. {
  51574. return *reinterpret_cast<const VkPhysicalDeviceGroupProperties*>( this );
  51575. }
  51576. operator VkPhysicalDeviceGroupProperties &() VULKAN_HPP_NOEXCEPT
  51577. {
  51578. return *reinterpret_cast<VkPhysicalDeviceGroupProperties*>( this );
  51579. }
  51580. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  51581. auto operator<=>( PhysicalDeviceGroupProperties const& ) const = default;
  51582. #else
  51583. bool operator==( PhysicalDeviceGroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  51584. {
  51585. return ( sType == rhs.sType )
  51586. && ( pNext == rhs.pNext )
  51587. && ( physicalDeviceCount == rhs.physicalDeviceCount )
  51588. && ( physicalDevices == rhs.physicalDevices )
  51589. && ( subsetAllocation == rhs.subsetAllocation );
  51590. }
  51591. bool operator!=( PhysicalDeviceGroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  51592. {
  51593. return !operator==( rhs );
  51594. }
  51595. #endif
  51596. public:
  51597. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceGroupProperties;
  51598. void* pNext = {};
  51599. uint32_t physicalDeviceCount = {};
  51600. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::PhysicalDevice, VK_MAX_DEVICE_GROUP_SIZE> physicalDevices = {};
  51601. VULKAN_HPP_NAMESPACE::Bool32 subsetAllocation = {};
  51602. };
  51603. static_assert( sizeof( PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ), "struct and wrapper have different size!" );
  51604. static_assert( std::is_standard_layout<PhysicalDeviceGroupProperties>::value, "struct wrapper is not a standard layout!" );
  51605. template <>
  51606. struct CppType<StructureType, StructureType::ePhysicalDeviceGroupProperties>
  51607. {
  51608. using Type = PhysicalDeviceGroupProperties;
  51609. };
  51610. using PhysicalDeviceGroupPropertiesKHR = PhysicalDeviceGroupProperties;
  51611. struct PhysicalDeviceHostQueryResetFeatures
  51612. {
  51613. static const bool allowDuplicate = false;
  51614. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceHostQueryResetFeatures;
  51615. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51616. VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures(VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}) VULKAN_HPP_NOEXCEPT
  51617. : hostQueryReset( hostQueryReset_ )
  51618. {}
  51619. VULKAN_HPP_CONSTEXPR PhysicalDeviceHostQueryResetFeatures( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51620. PhysicalDeviceHostQueryResetFeatures( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  51621. : PhysicalDeviceHostQueryResetFeatures( *reinterpret_cast<PhysicalDeviceHostQueryResetFeatures const *>( &rhs ) )
  51622. {}
  51623. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51624. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceHostQueryResetFeatures & operator=( PhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51625. PhysicalDeviceHostQueryResetFeatures & operator=( VkPhysicalDeviceHostQueryResetFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  51626. {
  51627. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures const *>( &rhs );
  51628. return *this;
  51629. }
  51630. PhysicalDeviceHostQueryResetFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  51631. {
  51632. pNext = pNext_;
  51633. return *this;
  51634. }
  51635. PhysicalDeviceHostQueryResetFeatures & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
  51636. {
  51637. hostQueryReset = hostQueryReset_;
  51638. return *this;
  51639. }
  51640. operator VkPhysicalDeviceHostQueryResetFeatures const&() const VULKAN_HPP_NOEXCEPT
  51641. {
  51642. return *reinterpret_cast<const VkPhysicalDeviceHostQueryResetFeatures*>( this );
  51643. }
  51644. operator VkPhysicalDeviceHostQueryResetFeatures &() VULKAN_HPP_NOEXCEPT
  51645. {
  51646. return *reinterpret_cast<VkPhysicalDeviceHostQueryResetFeatures*>( this );
  51647. }
  51648. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  51649. auto operator<=>( PhysicalDeviceHostQueryResetFeatures const& ) const = default;
  51650. #else
  51651. bool operator==( PhysicalDeviceHostQueryResetFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  51652. {
  51653. return ( sType == rhs.sType )
  51654. && ( pNext == rhs.pNext )
  51655. && ( hostQueryReset == rhs.hostQueryReset );
  51656. }
  51657. bool operator!=( PhysicalDeviceHostQueryResetFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  51658. {
  51659. return !operator==( rhs );
  51660. }
  51661. #endif
  51662. public:
  51663. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceHostQueryResetFeatures;
  51664. void* pNext = {};
  51665. VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {};
  51666. };
  51667. static_assert( sizeof( PhysicalDeviceHostQueryResetFeatures ) == sizeof( VkPhysicalDeviceHostQueryResetFeatures ), "struct and wrapper have different size!" );
  51668. static_assert( std::is_standard_layout<PhysicalDeviceHostQueryResetFeatures>::value, "struct wrapper is not a standard layout!" );
  51669. template <>
  51670. struct CppType<StructureType, StructureType::ePhysicalDeviceHostQueryResetFeatures>
  51671. {
  51672. using Type = PhysicalDeviceHostQueryResetFeatures;
  51673. };
  51674. using PhysicalDeviceHostQueryResetFeaturesEXT = PhysicalDeviceHostQueryResetFeatures;
  51675. struct PhysicalDeviceIDProperties
  51676. {
  51677. static const bool allowDuplicate = false;
  51678. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIdProperties;
  51679. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51680. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties(std::array<uint8_t,VK_UUID_SIZE> const& deviceUUID_ = {}, std::array<uint8_t,VK_UUID_SIZE> const& driverUUID_ = {}, std::array<uint8_t,VK_LUID_SIZE> const& deviceLUID_ = {}, uint32_t deviceNodeMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}) VULKAN_HPP_NOEXCEPT
  51681. : deviceUUID( deviceUUID_ ), driverUUID( driverUUID_ ), deviceLUID( deviceLUID_ ), deviceNodeMask( deviceNodeMask_ ), deviceLUIDValid( deviceLUIDValid_ )
  51682. {}
  51683. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51684. PhysicalDeviceIDProperties( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  51685. : PhysicalDeviceIDProperties( *reinterpret_cast<PhysicalDeviceIDProperties const *>( &rhs ) )
  51686. {}
  51687. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51688. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIDProperties & operator=( PhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51689. PhysicalDeviceIDProperties & operator=( VkPhysicalDeviceIDProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  51690. {
  51691. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties const *>( &rhs );
  51692. return *this;
  51693. }
  51694. operator VkPhysicalDeviceIDProperties const&() const VULKAN_HPP_NOEXCEPT
  51695. {
  51696. return *reinterpret_cast<const VkPhysicalDeviceIDProperties*>( this );
  51697. }
  51698. operator VkPhysicalDeviceIDProperties &() VULKAN_HPP_NOEXCEPT
  51699. {
  51700. return *reinterpret_cast<VkPhysicalDeviceIDProperties*>( this );
  51701. }
  51702. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  51703. auto operator<=>( PhysicalDeviceIDProperties const& ) const = default;
  51704. #else
  51705. bool operator==( PhysicalDeviceIDProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  51706. {
  51707. return ( sType == rhs.sType )
  51708. && ( pNext == rhs.pNext )
  51709. && ( deviceUUID == rhs.deviceUUID )
  51710. && ( driverUUID == rhs.driverUUID )
  51711. && ( deviceLUID == rhs.deviceLUID )
  51712. && ( deviceNodeMask == rhs.deviceNodeMask )
  51713. && ( deviceLUIDValid == rhs.deviceLUIDValid );
  51714. }
  51715. bool operator!=( PhysicalDeviceIDProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  51716. {
  51717. return !operator==( rhs );
  51718. }
  51719. #endif
  51720. public:
  51721. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIdProperties;
  51722. void* pNext = {};
  51723. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {};
  51724. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {};
  51725. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {};
  51726. uint32_t deviceNodeMask = {};
  51727. VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
  51728. };
  51729. static_assert( sizeof( PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ), "struct and wrapper have different size!" );
  51730. static_assert( std::is_standard_layout<PhysicalDeviceIDProperties>::value, "struct wrapper is not a standard layout!" );
  51731. template <>
  51732. struct CppType<StructureType, StructureType::ePhysicalDeviceIdProperties>
  51733. {
  51734. using Type = PhysicalDeviceIDProperties;
  51735. };
  51736. using PhysicalDeviceIDPropertiesKHR = PhysicalDeviceIDProperties;
  51737. struct PhysicalDeviceImageDrmFormatModifierInfoEXT
  51738. {
  51739. static const bool allowDuplicate = false;
  51740. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
  51741. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51742. VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT(uint64_t drmFormatModifier_ = {}, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive, uint32_t queueFamilyIndexCount_ = {}, const uint32_t* pQueueFamilyIndices_ = {}) VULKAN_HPP_NOEXCEPT
  51743. : drmFormatModifier( drmFormatModifier_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( queueFamilyIndexCount_ ), pQueueFamilyIndices( pQueueFamilyIndices_ )
  51744. {}
  51745. VULKAN_HPP_CONSTEXPR PhysicalDeviceImageDrmFormatModifierInfoEXT( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51746. PhysicalDeviceImageDrmFormatModifierInfoEXT( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  51747. : PhysicalDeviceImageDrmFormatModifierInfoEXT( *reinterpret_cast<PhysicalDeviceImageDrmFormatModifierInfoEXT const *>( &rhs ) )
  51748. {}
  51749. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  51750. PhysicalDeviceImageDrmFormatModifierInfoEXT( uint64_t drmFormatModifier_, VULKAN_HPP_NAMESPACE::SharingMode sharingMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ )
  51751. : drmFormatModifier( drmFormatModifier_ ), sharingMode( sharingMode_ ), queueFamilyIndexCount( static_cast<uint32_t>( queueFamilyIndices_.size() ) ), pQueueFamilyIndices( queueFamilyIndices_.data() )
  51752. {}
  51753. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  51754. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51755. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( PhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51756. PhysicalDeviceImageDrmFormatModifierInfoEXT & operator=( VkPhysicalDeviceImageDrmFormatModifierInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  51757. {
  51758. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT const *>( &rhs );
  51759. return *this;
  51760. }
  51761. PhysicalDeviceImageDrmFormatModifierInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  51762. {
  51763. pNext = pNext_;
  51764. return *this;
  51765. }
  51766. PhysicalDeviceImageDrmFormatModifierInfoEXT & setDrmFormatModifier( uint64_t drmFormatModifier_ ) VULKAN_HPP_NOEXCEPT
  51767. {
  51768. drmFormatModifier = drmFormatModifier_;
  51769. return *this;
  51770. }
  51771. PhysicalDeviceImageDrmFormatModifierInfoEXT & setSharingMode( VULKAN_HPP_NAMESPACE::SharingMode sharingMode_ ) VULKAN_HPP_NOEXCEPT
  51772. {
  51773. sharingMode = sharingMode_;
  51774. return *this;
  51775. }
  51776. PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndexCount( uint32_t queueFamilyIndexCount_ ) VULKAN_HPP_NOEXCEPT
  51777. {
  51778. queueFamilyIndexCount = queueFamilyIndexCount_;
  51779. return *this;
  51780. }
  51781. PhysicalDeviceImageDrmFormatModifierInfoEXT & setPQueueFamilyIndices( const uint32_t* pQueueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
  51782. {
  51783. pQueueFamilyIndices = pQueueFamilyIndices_;
  51784. return *this;
  51785. }
  51786. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  51787. PhysicalDeviceImageDrmFormatModifierInfoEXT & setQueueFamilyIndices( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & queueFamilyIndices_ ) VULKAN_HPP_NOEXCEPT
  51788. {
  51789. queueFamilyIndexCount = static_cast<uint32_t>( queueFamilyIndices_.size() );
  51790. pQueueFamilyIndices = queueFamilyIndices_.data();
  51791. return *this;
  51792. }
  51793. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  51794. operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  51795. {
  51796. return *reinterpret_cast<const VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
  51797. }
  51798. operator VkPhysicalDeviceImageDrmFormatModifierInfoEXT &() VULKAN_HPP_NOEXCEPT
  51799. {
  51800. return *reinterpret_cast<VkPhysicalDeviceImageDrmFormatModifierInfoEXT*>( this );
  51801. }
  51802. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  51803. auto operator<=>( PhysicalDeviceImageDrmFormatModifierInfoEXT const& ) const = default;
  51804. #else
  51805. bool operator==( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  51806. {
  51807. return ( sType == rhs.sType )
  51808. && ( pNext == rhs.pNext )
  51809. && ( drmFormatModifier == rhs.drmFormatModifier )
  51810. && ( sharingMode == rhs.sharingMode )
  51811. && ( queueFamilyIndexCount == rhs.queueFamilyIndexCount )
  51812. && ( pQueueFamilyIndices == rhs.pQueueFamilyIndices );
  51813. }
  51814. bool operator!=( PhysicalDeviceImageDrmFormatModifierInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  51815. {
  51816. return !operator==( rhs );
  51817. }
  51818. #endif
  51819. public:
  51820. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT;
  51821. const void* pNext = {};
  51822. uint64_t drmFormatModifier = {};
  51823. VULKAN_HPP_NAMESPACE::SharingMode sharingMode = VULKAN_HPP_NAMESPACE::SharingMode::eExclusive;
  51824. uint32_t queueFamilyIndexCount = {};
  51825. const uint32_t* pQueueFamilyIndices = {};
  51826. };
  51827. static_assert( sizeof( PhysicalDeviceImageDrmFormatModifierInfoEXT ) == sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ), "struct and wrapper have different size!" );
  51828. static_assert( std::is_standard_layout<PhysicalDeviceImageDrmFormatModifierInfoEXT>::value, "struct wrapper is not a standard layout!" );
  51829. template <>
  51830. struct CppType<StructureType, StructureType::ePhysicalDeviceImageDrmFormatModifierInfoEXT>
  51831. {
  51832. using Type = PhysicalDeviceImageDrmFormatModifierInfoEXT;
  51833. };
  51834. struct PhysicalDeviceImageRobustnessFeaturesEXT
  51835. {
  51836. static const bool allowDuplicate = false;
  51837. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT;
  51838. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51839. VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ = {}) VULKAN_HPP_NOEXCEPT
  51840. : robustImageAccess( robustImageAccess_ )
  51841. {}
  51842. VULKAN_HPP_CONSTEXPR PhysicalDeviceImageRobustnessFeaturesEXT( PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51843. PhysicalDeviceImageRobustnessFeaturesEXT( VkPhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  51844. : PhysicalDeviceImageRobustnessFeaturesEXT( *reinterpret_cast<PhysicalDeviceImageRobustnessFeaturesEXT const *>( &rhs ) )
  51845. {}
  51846. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51847. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageRobustnessFeaturesEXT & operator=( PhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51848. PhysicalDeviceImageRobustnessFeaturesEXT & operator=( VkPhysicalDeviceImageRobustnessFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  51849. {
  51850. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeaturesEXT const *>( &rhs );
  51851. return *this;
  51852. }
  51853. PhysicalDeviceImageRobustnessFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  51854. {
  51855. pNext = pNext_;
  51856. return *this;
  51857. }
  51858. PhysicalDeviceImageRobustnessFeaturesEXT & setRobustImageAccess( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess_ ) VULKAN_HPP_NOEXCEPT
  51859. {
  51860. robustImageAccess = robustImageAccess_;
  51861. return *this;
  51862. }
  51863. operator VkPhysicalDeviceImageRobustnessFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  51864. {
  51865. return *reinterpret_cast<const VkPhysicalDeviceImageRobustnessFeaturesEXT*>( this );
  51866. }
  51867. operator VkPhysicalDeviceImageRobustnessFeaturesEXT &() VULKAN_HPP_NOEXCEPT
  51868. {
  51869. return *reinterpret_cast<VkPhysicalDeviceImageRobustnessFeaturesEXT*>( this );
  51870. }
  51871. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  51872. auto operator<=>( PhysicalDeviceImageRobustnessFeaturesEXT const& ) const = default;
  51873. #else
  51874. bool operator==( PhysicalDeviceImageRobustnessFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  51875. {
  51876. return ( sType == rhs.sType )
  51877. && ( pNext == rhs.pNext )
  51878. && ( robustImageAccess == rhs.robustImageAccess );
  51879. }
  51880. bool operator!=( PhysicalDeviceImageRobustnessFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  51881. {
  51882. return !operator==( rhs );
  51883. }
  51884. #endif
  51885. public:
  51886. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT;
  51887. void* pNext = {};
  51888. VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess = {};
  51889. };
  51890. static_assert( sizeof( PhysicalDeviceImageRobustnessFeaturesEXT ) == sizeof( VkPhysicalDeviceImageRobustnessFeaturesEXT ), "struct and wrapper have different size!" );
  51891. static_assert( std::is_standard_layout<PhysicalDeviceImageRobustnessFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  51892. template <>
  51893. struct CppType<StructureType, StructureType::ePhysicalDeviceImageRobustnessFeaturesEXT>
  51894. {
  51895. using Type = PhysicalDeviceImageRobustnessFeaturesEXT;
  51896. };
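// Usage sketch (illustrative only, not part of the generated interface): the feature is queried
// through the pNext chain of PhysicalDeviceFeatures2 and, if reported, enabled by chaining the same
// struct into DeviceCreateInfo. "physicalDevice", "queueCreateInfo" and the default "vk" namespace
// alias are assumptions from the caller.
//
//   vk::PhysicalDeviceImageRobustnessFeaturesEXT imageRobustnessFeatures;
//   vk::PhysicalDeviceFeatures2 features2;
//   features2.pNext = &imageRobustnessFeatures;
//   physicalDevice.getFeatures2( &features2 );
//
//   vk::DeviceCreateInfo deviceCreateInfo;
//   deviceCreateInfo.pNext                = &imageRobustnessFeatures;   // enables robustImageAccess if VK_TRUE
//   deviceCreateInfo.queueCreateInfoCount = 1;
//   deviceCreateInfo.pQueueCreateInfos    = &queueCreateInfo;
//   vk::UniqueDevice device = physicalDevice.createDeviceUnique( deviceCreateInfo );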
  51897. struct PhysicalDeviceImageViewImageFormatInfoEXT
  51898. {
  51899. static const bool allowDuplicate = false;
  51900. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
  51901. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51902. VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT(VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ = VULKAN_HPP_NAMESPACE::ImageViewType::e1D) VULKAN_HPP_NOEXCEPT
  51903. : imageViewType( imageViewType_ )
  51904. {}
  51905. VULKAN_HPP_CONSTEXPR PhysicalDeviceImageViewImageFormatInfoEXT( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51906. PhysicalDeviceImageViewImageFormatInfoEXT( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  51907. : PhysicalDeviceImageViewImageFormatInfoEXT( *reinterpret_cast<PhysicalDeviceImageViewImageFormatInfoEXT const *>( &rhs ) )
  51908. {}
  51909. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51910. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImageViewImageFormatInfoEXT & operator=( PhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51911. PhysicalDeviceImageViewImageFormatInfoEXT & operator=( VkPhysicalDeviceImageViewImageFormatInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  51912. {
  51913. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT const *>( &rhs );
  51914. return *this;
  51915. }
  51916. PhysicalDeviceImageViewImageFormatInfoEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  51917. {
  51918. pNext = pNext_;
  51919. return *this;
  51920. }
  51921. PhysicalDeviceImageViewImageFormatInfoEXT & setImageViewType( VULKAN_HPP_NAMESPACE::ImageViewType imageViewType_ ) VULKAN_HPP_NOEXCEPT
  51922. {
  51923. imageViewType = imageViewType_;
  51924. return *this;
  51925. }
  51926. operator VkPhysicalDeviceImageViewImageFormatInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  51927. {
  51928. return *reinterpret_cast<const VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
  51929. }
  51930. operator VkPhysicalDeviceImageViewImageFormatInfoEXT &() VULKAN_HPP_NOEXCEPT
  51931. {
  51932. return *reinterpret_cast<VkPhysicalDeviceImageViewImageFormatInfoEXT*>( this );
  51933. }
  51934. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  51935. auto operator<=>( PhysicalDeviceImageViewImageFormatInfoEXT const& ) const = default;
  51936. #else
  51937. bool operator==( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  51938. {
  51939. return ( sType == rhs.sType )
  51940. && ( pNext == rhs.pNext )
  51941. && ( imageViewType == rhs.imageViewType );
  51942. }
  51943. bool operator!=( PhysicalDeviceImageViewImageFormatInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  51944. {
  51945. return !operator==( rhs );
  51946. }
  51947. #endif
  51948. public:
  51949. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT;
  51950. void* pNext = {};
  51951. VULKAN_HPP_NAMESPACE::ImageViewType imageViewType = VULKAN_HPP_NAMESPACE::ImageViewType::e1D;
  51952. };
  51953. static_assert( sizeof( PhysicalDeviceImageViewImageFormatInfoEXT ) == sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ), "struct and wrapper have different size!" );
  51954. static_assert( std::is_standard_layout<PhysicalDeviceImageViewImageFormatInfoEXT>::value, "struct wrapper is not a standard layout!" );
  51955. template <>
  51956. struct CppType<StructureType, StructureType::ePhysicalDeviceImageViewImageFormatInfoEXT>
  51957. {
  51958. using Type = PhysicalDeviceImageViewImageFormatInfoEXT;
  51959. };
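// Usage sketch (illustrative only, not part of the generated interface): chained into
// PhysicalDeviceImageFormatInfo2 to query a specific image view type, typically together with
// FilterCubicImageViewImageFormatPropertiesEXT on the output side (VK_EXT_filter_cubic).
// "physicalDevice" and the "vk" alias are assumptions.
//
//   vk::PhysicalDeviceImageViewImageFormatInfoEXT viewInfo;
//   viewInfo.imageViewType = vk::ImageViewType::e2D;
//
//   vk::PhysicalDeviceImageFormatInfo2 imageFormatInfo;
//   imageFormatInfo.pNext  = &viewInfo;
//   imageFormatInfo.format = vk::Format::eR8G8B8A8Unorm;
//   imageFormatInfo.type   = vk::ImageType::e2D;
//   imageFormatInfo.tiling = vk::ImageTiling::eOptimal;
//   imageFormatInfo.usage  = vk::ImageUsageFlagBits::eSampled;
//
//   vk::FilterCubicImageViewImageFormatPropertiesEXT cubicProperties;
//   vk::ImageFormatProperties2 imageFormatProperties;
//   imageFormatProperties.pNext = &cubicProperties;
//   if ( physicalDevice.getImageFormatProperties2( &imageFormatInfo, &imageFormatProperties ) == vk::Result::eSuccess )
//   {
//     bool cubicFilteringSupported = ( cubicProperties.filterCubic == VK_TRUE );
//   }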
  51960. struct PhysicalDeviceImagelessFramebufferFeatures
  51961. {
  51962. static const bool allowDuplicate = false;
  51963. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
  51964. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51965. VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures(VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}) VULKAN_HPP_NOEXCEPT
  51966. : imagelessFramebuffer( imagelessFramebuffer_ )
  51967. {}
  51968. VULKAN_HPP_CONSTEXPR PhysicalDeviceImagelessFramebufferFeatures( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51969. PhysicalDeviceImagelessFramebufferFeatures( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  51970. : PhysicalDeviceImagelessFramebufferFeatures( *reinterpret_cast<PhysicalDeviceImagelessFramebufferFeatures const *>( &rhs ) )
  51971. {}
  51972. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  51973. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceImagelessFramebufferFeatures & operator=( PhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  51974. PhysicalDeviceImagelessFramebufferFeatures & operator=( VkPhysicalDeviceImagelessFramebufferFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  51975. {
  51976. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures const *>( &rhs );
  51977. return *this;
  51978. }
  51979. PhysicalDeviceImagelessFramebufferFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  51980. {
  51981. pNext = pNext_;
  51982. return *this;
  51983. }
  51984. PhysicalDeviceImagelessFramebufferFeatures & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
  51985. {
  51986. imagelessFramebuffer = imagelessFramebuffer_;
  51987. return *this;
  51988. }
  51989. operator VkPhysicalDeviceImagelessFramebufferFeatures const&() const VULKAN_HPP_NOEXCEPT
  51990. {
  51991. return *reinterpret_cast<const VkPhysicalDeviceImagelessFramebufferFeatures*>( this );
  51992. }
  51993. operator VkPhysicalDeviceImagelessFramebufferFeatures &() VULKAN_HPP_NOEXCEPT
  51994. {
  51995. return *reinterpret_cast<VkPhysicalDeviceImagelessFramebufferFeatures*>( this );
  51996. }
  51997. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  51998. auto operator<=>( PhysicalDeviceImagelessFramebufferFeatures const& ) const = default;
  51999. #else
  52000. bool operator==( PhysicalDeviceImagelessFramebufferFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  52001. {
  52002. return ( sType == rhs.sType )
  52003. && ( pNext == rhs.pNext )
  52004. && ( imagelessFramebuffer == rhs.imagelessFramebuffer );
  52005. }
  52006. bool operator!=( PhysicalDeviceImagelessFramebufferFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  52007. {
  52008. return !operator==( rhs );
  52009. }
  52010. #endif
  52011. public:
  52012. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceImagelessFramebufferFeatures;
  52013. void* pNext = {};
  52014. VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
  52015. };
  52016. static_assert( sizeof( PhysicalDeviceImagelessFramebufferFeatures ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeatures ), "struct and wrapper have different size!" );
  52017. static_assert( std::is_standard_layout<PhysicalDeviceImagelessFramebufferFeatures>::value, "struct wrapper is not a standard layout!" );
  52018. template <>
  52019. struct CppType<StructureType, StructureType::ePhysicalDeviceImagelessFramebufferFeatures>
  52020. {
  52021. using Type = PhysicalDeviceImagelessFramebufferFeatures;
  52022. };
  52023. using PhysicalDeviceImagelessFramebufferFeaturesKHR = PhysicalDeviceImagelessFramebufferFeatures;
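// Usage sketch (illustrative only, not part of the generated interface): in enhanced mode the
// feature can also be queried through a StructureChain, which wires up the pNext pointers
// automatically. "physicalDevice" and the "vk" alias are assumptions.
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDeviceImagelessFramebufferFeatures>();
//   bool imagelessFramebufferSupported =
//     chain.get<vk::PhysicalDeviceImagelessFramebufferFeatures>().imagelessFramebuffer == VK_TRUE;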
  52024. struct PhysicalDeviceIndexTypeUint8FeaturesEXT
  52025. {
  52026. static const bool allowDuplicate = false;
  52027. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
  52028. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52029. VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ = {}) VULKAN_HPP_NOEXCEPT
  52030. : indexTypeUint8( indexTypeUint8_ )
  52031. {}
  52032. VULKAN_HPP_CONSTEXPR PhysicalDeviceIndexTypeUint8FeaturesEXT( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52033. PhysicalDeviceIndexTypeUint8FeaturesEXT( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  52034. : PhysicalDeviceIndexTypeUint8FeaturesEXT( *reinterpret_cast<PhysicalDeviceIndexTypeUint8FeaturesEXT const *>( &rhs ) )
  52035. {}
  52036. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52037. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( PhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52038. PhysicalDeviceIndexTypeUint8FeaturesEXT & operator=( VkPhysicalDeviceIndexTypeUint8FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  52039. {
  52040. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT const *>( &rhs );
  52041. return *this;
  52042. }
  52043. PhysicalDeviceIndexTypeUint8FeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  52044. {
  52045. pNext = pNext_;
  52046. return *this;
  52047. }
  52048. PhysicalDeviceIndexTypeUint8FeaturesEXT & setIndexTypeUint8( VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8_ ) VULKAN_HPP_NOEXCEPT
  52049. {
  52050. indexTypeUint8 = indexTypeUint8_;
  52051. return *this;
  52052. }
  52053. operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  52054. {
  52055. return *reinterpret_cast<const VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this );
  52056. }
  52057. operator VkPhysicalDeviceIndexTypeUint8FeaturesEXT &() VULKAN_HPP_NOEXCEPT
  52058. {
  52059. return *reinterpret_cast<VkPhysicalDeviceIndexTypeUint8FeaturesEXT*>( this );
  52060. }
  52061. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  52062. auto operator<=>( PhysicalDeviceIndexTypeUint8FeaturesEXT const& ) const = default;
  52063. #else
  52064. bool operator==( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  52065. {
  52066. return ( sType == rhs.sType )
  52067. && ( pNext == rhs.pNext )
  52068. && ( indexTypeUint8 == rhs.indexTypeUint8 );
  52069. }
  52070. bool operator!=( PhysicalDeviceIndexTypeUint8FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  52071. {
  52072. return !operator==( rhs );
  52073. }
  52074. #endif
  52075. public:
  52076. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT;
  52077. void* pNext = {};
  52078. VULKAN_HPP_NAMESPACE::Bool32 indexTypeUint8 = {};
  52079. };
  52080. static_assert( sizeof( PhysicalDeviceIndexTypeUint8FeaturesEXT ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ), "struct and wrapper have different size!" );
  52081. static_assert( std::is_standard_layout<PhysicalDeviceIndexTypeUint8FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  52082. template <>
  52083. struct CppType<StructureType, StructureType::ePhysicalDeviceIndexTypeUint8FeaturesEXT>
  52084. {
  52085. using Type = PhysicalDeviceIndexTypeUint8FeaturesEXT;
  52086. };
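// Usage sketch (illustrative only, not part of the generated interface): once the feature is
// enabled at device creation (and VK_EXT_index_type_uint8 is enabled), 8-bit index buffers can be
// bound with IndexType::eUint8EXT. "deviceCreateInfo", "commandBuffer", "indexBuffer" and the "vk"
// alias are assumptions from the caller.
//
//   vk::PhysicalDeviceIndexTypeUint8FeaturesEXT indexTypeUint8Features;
//   indexTypeUint8Features.indexTypeUint8 = VK_TRUE;
//   deviceCreateInfo.pNext = &indexTypeUint8Features;
//   ...
//   commandBuffer.bindIndexBuffer( indexBuffer, 0, vk::IndexType::eUint8EXT );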
  52087. struct PhysicalDeviceInlineUniformBlockFeaturesEXT
  52088. {
  52089. static const bool allowDuplicate = false;
  52090. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT;
  52091. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52092. VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ = {}) VULKAN_HPP_NOEXCEPT
  52093. : inlineUniformBlock( inlineUniformBlock_ ), descriptorBindingInlineUniformBlockUpdateAfterBind( descriptorBindingInlineUniformBlockUpdateAfterBind_ )
  52094. {}
  52095. VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockFeaturesEXT( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52096. PhysicalDeviceInlineUniformBlockFeaturesEXT( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  52097. : PhysicalDeviceInlineUniformBlockFeaturesEXT( *reinterpret_cast<PhysicalDeviceInlineUniformBlockFeaturesEXT const *>( &rhs ) )
  52098. {}
  52099. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52100. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockFeaturesEXT & operator=( PhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52101. PhysicalDeviceInlineUniformBlockFeaturesEXT & operator=( VkPhysicalDeviceInlineUniformBlockFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  52102. {
  52103. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT const *>( &rhs );
  52104. return *this;
  52105. }
  52106. PhysicalDeviceInlineUniformBlockFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  52107. {
  52108. pNext = pNext_;
  52109. return *this;
  52110. }
  52111. PhysicalDeviceInlineUniformBlockFeaturesEXT & setInlineUniformBlock( VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock_ ) VULKAN_HPP_NOEXCEPT
  52112. {
  52113. inlineUniformBlock = inlineUniformBlock_;
  52114. return *this;
  52115. }
  52116. PhysicalDeviceInlineUniformBlockFeaturesEXT & setDescriptorBindingInlineUniformBlockUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
  52117. {
  52118. descriptorBindingInlineUniformBlockUpdateAfterBind = descriptorBindingInlineUniformBlockUpdateAfterBind_;
  52119. return *this;
  52120. }
  52121. operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  52122. {
  52123. return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockFeaturesEXT*>( this );
  52124. }
  52125. operator VkPhysicalDeviceInlineUniformBlockFeaturesEXT &() VULKAN_HPP_NOEXCEPT
  52126. {
  52127. return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockFeaturesEXT*>( this );
  52128. }
  52129. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  52130. auto operator<=>( PhysicalDeviceInlineUniformBlockFeaturesEXT const& ) const = default;
  52131. #else
  52132. bool operator==( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  52133. {
  52134. return ( sType == rhs.sType )
  52135. && ( pNext == rhs.pNext )
  52136. && ( inlineUniformBlock == rhs.inlineUniformBlock )
  52137. && ( descriptorBindingInlineUniformBlockUpdateAfterBind == rhs.descriptorBindingInlineUniformBlockUpdateAfterBind );
  52138. }
  52139. bool operator!=( PhysicalDeviceInlineUniformBlockFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  52140. {
  52141. return !operator==( rhs );
  52142. }
  52143. #endif
  52144. public:
  52145. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT;
  52146. void* pNext = {};
  52147. VULKAN_HPP_NAMESPACE::Bool32 inlineUniformBlock = {};
  52148. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingInlineUniformBlockUpdateAfterBind = {};
  52149. };
  52150. static_assert( sizeof( PhysicalDeviceInlineUniformBlockFeaturesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeaturesEXT ), "struct and wrapper have different size!" );
  52151. static_assert( std::is_standard_layout<PhysicalDeviceInlineUniformBlockFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  52152. template <>
  52153. struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockFeaturesEXT>
  52154. {
  52155. using Type = PhysicalDeviceInlineUniformBlockFeaturesEXT;
  52156. };
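// Usage sketch (illustrative only, not part of the generated interface): several feature structs
// can be linked into a single getFeatures2 query; here the inline uniform block features are chained
// behind another extension struct. "physicalDevice" and the "vk" alias are assumptions.
//
//   vk::PhysicalDeviceInlineUniformBlockFeaturesEXT inlineUniformBlockFeatures;
//   vk::PhysicalDeviceDescriptorIndexingFeatures    descriptorIndexingFeatures;
//   descriptorIndexingFeatures.pNext = &inlineUniformBlockFeatures;
//
//   vk::PhysicalDeviceFeatures2 features2;
//   features2.pNext = &descriptorIndexingFeatures;
//   physicalDevice.getFeatures2( &features2 );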
  52157. struct PhysicalDeviceInlineUniformBlockPropertiesEXT
  52158. {
  52159. static const bool allowDuplicate = false;
  52160. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT;
  52161. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52162. VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT(uint32_t maxInlineUniformBlockSize_ = {}, uint32_t maxPerStageDescriptorInlineUniformBlocks_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetInlineUniformBlocks_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ = {}) VULKAN_HPP_NOEXCEPT
  52163. : maxInlineUniformBlockSize( maxInlineUniformBlockSize_ ), maxPerStageDescriptorInlineUniformBlocks( maxPerStageDescriptorInlineUniformBlocks_ ), maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks_ ), maxDescriptorSetInlineUniformBlocks( maxDescriptorSetInlineUniformBlocks_ ), maxDescriptorSetUpdateAfterBindInlineUniformBlocks( maxDescriptorSetUpdateAfterBindInlineUniformBlocks_ )
  52164. {}
  52165. VULKAN_HPP_CONSTEXPR PhysicalDeviceInlineUniformBlockPropertiesEXT( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52166. PhysicalDeviceInlineUniformBlockPropertiesEXT( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  52167. : PhysicalDeviceInlineUniformBlockPropertiesEXT( *reinterpret_cast<PhysicalDeviceInlineUniformBlockPropertiesEXT const *>( &rhs ) )
  52168. {}
  52169. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52170. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceInlineUniformBlockPropertiesEXT & operator=( PhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52171. PhysicalDeviceInlineUniformBlockPropertiesEXT & operator=( VkPhysicalDeviceInlineUniformBlockPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  52172. {
  52173. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT const *>( &rhs );
  52174. return *this;
  52175. }
  52176. operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
  52177. {
  52178. return *reinterpret_cast<const VkPhysicalDeviceInlineUniformBlockPropertiesEXT*>( this );
  52179. }
  52180. operator VkPhysicalDeviceInlineUniformBlockPropertiesEXT &() VULKAN_HPP_NOEXCEPT
  52181. {
  52182. return *reinterpret_cast<VkPhysicalDeviceInlineUniformBlockPropertiesEXT*>( this );
  52183. }
  52184. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  52185. auto operator<=>( PhysicalDeviceInlineUniformBlockPropertiesEXT const& ) const = default;
  52186. #else
  52187. bool operator==( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  52188. {
  52189. return ( sType == rhs.sType )
  52190. && ( pNext == rhs.pNext )
  52191. && ( maxInlineUniformBlockSize == rhs.maxInlineUniformBlockSize )
  52192. && ( maxPerStageDescriptorInlineUniformBlocks == rhs.maxPerStageDescriptorInlineUniformBlocks )
  52193. && ( maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks == rhs.maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks )
  52194. && ( maxDescriptorSetInlineUniformBlocks == rhs.maxDescriptorSetInlineUniformBlocks )
  52195. && ( maxDescriptorSetUpdateAfterBindInlineUniformBlocks == rhs.maxDescriptorSetUpdateAfterBindInlineUniformBlocks );
  52196. }
  52197. bool operator!=( PhysicalDeviceInlineUniformBlockPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  52198. {
  52199. return !operator==( rhs );
  52200. }
  52201. #endif
  52202. public:
  52203. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT;
  52204. void* pNext = {};
  52205. uint32_t maxInlineUniformBlockSize = {};
  52206. uint32_t maxPerStageDescriptorInlineUniformBlocks = {};
  52207. uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks = {};
  52208. uint32_t maxDescriptorSetInlineUniformBlocks = {};
  52209. uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks = {};
  52210. };
  52211. static_assert( sizeof( PhysicalDeviceInlineUniformBlockPropertiesEXT ) == sizeof( VkPhysicalDeviceInlineUniformBlockPropertiesEXT ), "struct and wrapper have different size!" );
  52212. static_assert( std::is_standard_layout<PhysicalDeviceInlineUniformBlockPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  52213. template <>
  52214. struct CppType<StructureType, StructureType::ePhysicalDeviceInlineUniformBlockPropertiesEXT>
  52215. {
  52216. using Type = PhysicalDeviceInlineUniformBlockPropertiesEXT;
  52217. };
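// Usage sketch (illustrative only, not part of the generated interface): the limits are read back
// through the pNext chain of PhysicalDeviceProperties2, e.g. to clamp the size of an inline uniform
// block binding. "physicalDevice" and the "vk" alias are assumptions.
//
//   vk::PhysicalDeviceInlineUniformBlockPropertiesEXT inlineUniformBlockProperties;
//   vk::PhysicalDeviceProperties2 properties2;
//   properties2.pNext = &inlineUniformBlockProperties;
//   physicalDevice.getProperties2( &properties2 );
//   uint32_t maxBlockSize = inlineUniformBlockProperties.maxInlineUniformBlockSize;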
  52218. struct PhysicalDeviceLineRasterizationFeaturesEXT
  52219. {
  52220. static const bool allowDuplicate = false;
  52221. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
  52222. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52223. VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ = {}) VULKAN_HPP_NOEXCEPT
  52224. : rectangularLines( rectangularLines_ ), bresenhamLines( bresenhamLines_ ), smoothLines( smoothLines_ ), stippledRectangularLines( stippledRectangularLines_ ), stippledBresenhamLines( stippledBresenhamLines_ ), stippledSmoothLines( stippledSmoothLines_ )
  52225. {}
  52226. VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationFeaturesEXT( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52227. PhysicalDeviceLineRasterizationFeaturesEXT( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  52228. : PhysicalDeviceLineRasterizationFeaturesEXT( *reinterpret_cast<PhysicalDeviceLineRasterizationFeaturesEXT const *>( &rhs ) )
  52229. {}
  52230. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52231. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationFeaturesEXT & operator=( PhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52232. PhysicalDeviceLineRasterizationFeaturesEXT & operator=( VkPhysicalDeviceLineRasterizationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  52233. {
  52234. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT const *>( &rhs );
  52235. return *this;
  52236. }
  52237. PhysicalDeviceLineRasterizationFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  52238. {
  52239. pNext = pNext_;
  52240. return *this;
  52241. }
  52242. PhysicalDeviceLineRasterizationFeaturesEXT & setRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 rectangularLines_ ) VULKAN_HPP_NOEXCEPT
  52243. {
  52244. rectangularLines = rectangularLines_;
  52245. return *this;
  52246. }
  52247. PhysicalDeviceLineRasterizationFeaturesEXT & setBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines_ ) VULKAN_HPP_NOEXCEPT
  52248. {
  52249. bresenhamLines = bresenhamLines_;
  52250. return *this;
  52251. }
  52252. PhysicalDeviceLineRasterizationFeaturesEXT & setSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 smoothLines_ ) VULKAN_HPP_NOEXCEPT
  52253. {
  52254. smoothLines = smoothLines_;
  52255. return *this;
  52256. }
  52257. PhysicalDeviceLineRasterizationFeaturesEXT & setStippledRectangularLines( VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines_ ) VULKAN_HPP_NOEXCEPT
  52258. {
  52259. stippledRectangularLines = stippledRectangularLines_;
  52260. return *this;
  52261. }
  52262. PhysicalDeviceLineRasterizationFeaturesEXT & setStippledBresenhamLines( VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines_ ) VULKAN_HPP_NOEXCEPT
  52263. {
  52264. stippledBresenhamLines = stippledBresenhamLines_;
  52265. return *this;
  52266. }
  52267. PhysicalDeviceLineRasterizationFeaturesEXT & setStippledSmoothLines( VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines_ ) VULKAN_HPP_NOEXCEPT
  52268. {
  52269. stippledSmoothLines = stippledSmoothLines_;
  52270. return *this;
  52271. }
  52272. operator VkPhysicalDeviceLineRasterizationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  52273. {
  52274. return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationFeaturesEXT*>( this );
  52275. }
  52276. operator VkPhysicalDeviceLineRasterizationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
  52277. {
  52278. return *reinterpret_cast<VkPhysicalDeviceLineRasterizationFeaturesEXT*>( this );
  52279. }
  52280. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  52281. auto operator<=>( PhysicalDeviceLineRasterizationFeaturesEXT const& ) const = default;
  52282. #else
  52283. bool operator==( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  52284. {
  52285. return ( sType == rhs.sType )
  52286. && ( pNext == rhs.pNext )
  52287. && ( rectangularLines == rhs.rectangularLines )
  52288. && ( bresenhamLines == rhs.bresenhamLines )
  52289. && ( smoothLines == rhs.smoothLines )
  52290. && ( stippledRectangularLines == rhs.stippledRectangularLines )
  52291. && ( stippledBresenhamLines == rhs.stippledBresenhamLines )
  52292. && ( stippledSmoothLines == rhs.stippledSmoothLines );
  52293. }
  52294. bool operator!=( PhysicalDeviceLineRasterizationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  52295. {
  52296. return !operator==( rhs );
  52297. }
  52298. #endif
  52299. public:
  52300. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT;
  52301. void* pNext = {};
  52302. VULKAN_HPP_NAMESPACE::Bool32 rectangularLines = {};
  52303. VULKAN_HPP_NAMESPACE::Bool32 bresenhamLines = {};
  52304. VULKAN_HPP_NAMESPACE::Bool32 smoothLines = {};
  52305. VULKAN_HPP_NAMESPACE::Bool32 stippledRectangularLines = {};
  52306. VULKAN_HPP_NAMESPACE::Bool32 stippledBresenhamLines = {};
  52307. VULKAN_HPP_NAMESPACE::Bool32 stippledSmoothLines = {};
  52308. };
  52309. static_assert( sizeof( PhysicalDeviceLineRasterizationFeaturesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ), "struct and wrapper have different size!" );
  52310. static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  52311. template <>
  52312. struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationFeaturesEXT>
  52313. {
  52314. using Type = PhysicalDeviceLineRasterizationFeaturesEXT;
  52315. };
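// Usage sketch (illustrative only, not part of the generated interface): after enabling e.g.
// bresenhamLines at device creation, the line mode is selected per pipeline through
// PipelineRasterizationLineStateCreateInfoEXT chained into the rasterization state.
// "rasterizationState" (a vk::PipelineRasterizationStateCreateInfo) and the "vk" alias are assumptions.
//
//   vk::PipelineRasterizationLineStateCreateInfoEXT lineState;
//   lineState.lineRasterizationMode = vk::LineRasterizationModeEXT::eBresenham;
//   lineState.stippledLineEnable    = VK_FALSE;
//   rasterizationState.pNext = &lineState;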
  52316. struct PhysicalDeviceLineRasterizationPropertiesEXT
  52317. {
  52318. static const bool allowDuplicate = false;
  52319. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
  52320. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52321. VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT(uint32_t lineSubPixelPrecisionBits_ = {}) VULKAN_HPP_NOEXCEPT
  52322. : lineSubPixelPrecisionBits( lineSubPixelPrecisionBits_ )
  52323. {}
  52324. VULKAN_HPP_CONSTEXPR PhysicalDeviceLineRasterizationPropertiesEXT( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52325. PhysicalDeviceLineRasterizationPropertiesEXT( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  52326. : PhysicalDeviceLineRasterizationPropertiesEXT( *reinterpret_cast<PhysicalDeviceLineRasterizationPropertiesEXT const *>( &rhs ) )
  52327. {}
  52328. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52329. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceLineRasterizationPropertiesEXT & operator=( PhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52330. PhysicalDeviceLineRasterizationPropertiesEXT & operator=( VkPhysicalDeviceLineRasterizationPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  52331. {
  52332. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT const *>( &rhs );
  52333. return *this;
  52334. }
  52335. operator VkPhysicalDeviceLineRasterizationPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
  52336. {
  52337. return *reinterpret_cast<const VkPhysicalDeviceLineRasterizationPropertiesEXT*>( this );
  52338. }
  52339. operator VkPhysicalDeviceLineRasterizationPropertiesEXT &() VULKAN_HPP_NOEXCEPT
  52340. {
  52341. return *reinterpret_cast<VkPhysicalDeviceLineRasterizationPropertiesEXT*>( this );
  52342. }
  52343. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  52344. auto operator<=>( PhysicalDeviceLineRasterizationPropertiesEXT const& ) const = default;
  52345. #else
  52346. bool operator==( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  52347. {
  52348. return ( sType == rhs.sType )
  52349. && ( pNext == rhs.pNext )
  52350. && ( lineSubPixelPrecisionBits == rhs.lineSubPixelPrecisionBits );
  52351. }
  52352. bool operator!=( PhysicalDeviceLineRasterizationPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  52353. {
  52354. return !operator==( rhs );
  52355. }
  52356. #endif
  52357. public:
  52358. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT;
  52359. void* pNext = {};
  52360. uint32_t lineSubPixelPrecisionBits = {};
  52361. };
  52362. static_assert( sizeof( PhysicalDeviceLineRasterizationPropertiesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ), "struct and wrapper have different size!" );
  52363. static_assert( std::is_standard_layout<PhysicalDeviceLineRasterizationPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  52364. template <>
  52365. struct CppType<StructureType, StructureType::ePhysicalDeviceLineRasterizationPropertiesEXT>
  52366. {
  52367. using Type = PhysicalDeviceLineRasterizationPropertiesEXT;
  52368. };
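// Usage sketch (illustrative only, not part of the generated interface): lineSubPixelPrecisionBits
// can be read via a getProperties2 structure chain. "physicalDevice" and the "vk" alias are assumptions.
//
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceLineRasterizationPropertiesEXT>();
//   uint32_t subPixelBits = chain.get<vk::PhysicalDeviceLineRasterizationPropertiesEXT>().lineSubPixelPrecisionBits;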
  52369. struct PhysicalDeviceMaintenance3Properties
  52370. {
  52371. static const bool allowDuplicate = false;
  52372. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMaintenance3Properties;
  52373. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52374. VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties(uint32_t maxPerSetDescriptors_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}) VULKAN_HPP_NOEXCEPT
  52375. : maxPerSetDescriptors( maxPerSetDescriptors_ ), maxMemoryAllocationSize( maxMemoryAllocationSize_ )
  52376. {}
  52377. VULKAN_HPP_CONSTEXPR PhysicalDeviceMaintenance3Properties( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52378. PhysicalDeviceMaintenance3Properties( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
  52379. : PhysicalDeviceMaintenance3Properties( *reinterpret_cast<PhysicalDeviceMaintenance3Properties const *>( &rhs ) )
  52380. {}
  52381. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52382. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMaintenance3Properties & operator=( PhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52383. PhysicalDeviceMaintenance3Properties & operator=( VkPhysicalDeviceMaintenance3Properties const & rhs ) VULKAN_HPP_NOEXCEPT
  52384. {
  52385. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties const *>( &rhs );
  52386. return *this;
  52387. }
  52388. operator VkPhysicalDeviceMaintenance3Properties const&() const VULKAN_HPP_NOEXCEPT
  52389. {
  52390. return *reinterpret_cast<const VkPhysicalDeviceMaintenance3Properties*>( this );
  52391. }
  52392. operator VkPhysicalDeviceMaintenance3Properties &() VULKAN_HPP_NOEXCEPT
  52393. {
  52394. return *reinterpret_cast<VkPhysicalDeviceMaintenance3Properties*>( this );
  52395. }
  52396. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  52397. auto operator<=>( PhysicalDeviceMaintenance3Properties const& ) const = default;
  52398. #else
  52399. bool operator==( PhysicalDeviceMaintenance3Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
  52400. {
  52401. return ( sType == rhs.sType )
  52402. && ( pNext == rhs.pNext )
  52403. && ( maxPerSetDescriptors == rhs.maxPerSetDescriptors )
  52404. && ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
  52405. }
  52406. bool operator!=( PhysicalDeviceMaintenance3Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
  52407. {
  52408. return !operator==( rhs );
  52409. }
  52410. #endif
  52411. public:
  52412. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMaintenance3Properties;
  52413. void* pNext = {};
  52414. uint32_t maxPerSetDescriptors = {};
  52415. VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
  52416. };
  52417. static_assert( sizeof( PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ), "struct and wrapper have different size!" );
  52418. static_assert( std::is_standard_layout<PhysicalDeviceMaintenance3Properties>::value, "struct wrapper is not a standard layout!" );
  52419. template <>
  52420. struct CppType<StructureType, StructureType::ePhysicalDeviceMaintenance3Properties>
  52421. {
  52422. using Type = PhysicalDeviceMaintenance3Properties;
  52423. };
  52424. using PhysicalDeviceMaintenance3PropertiesKHR = PhysicalDeviceMaintenance3Properties;
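// Usage sketch (illustrative only, not part of the generated interface): maxMemoryAllocationSize
// bounds a single memory allocation, so it is worth checking before large requests. "physicalDevice",
// "device", "requestedSize", "memoryTypeIndex" and the "vk" alias are assumptions from the caller.
//
//   vk::PhysicalDeviceMaintenance3Properties maintenance3Properties;
//   vk::PhysicalDeviceProperties2 properties2;
//   properties2.pNext = &maintenance3Properties;
//   physicalDevice.getProperties2( &properties2 );
//
//   vk::MemoryAllocateInfo allocateInfo( requestedSize, memoryTypeIndex );
//   if ( allocateInfo.allocationSize <= maintenance3Properties.maxMemoryAllocationSize )
//   {
//     vk::UniqueDeviceMemory memory = device.allocateMemoryUnique( allocateInfo );
//   }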
  52425. struct PhysicalDeviceMemoryBudgetPropertiesEXT
  52426. {
  52427. static const bool allowDuplicate = false;
  52428. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
  52429. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52430. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT(std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const& heapBudget_ = {}, std::array<VULKAN_HPP_NAMESPACE::DeviceSize,VK_MAX_MEMORY_HEAPS> const& heapUsage_ = {}) VULKAN_HPP_NOEXCEPT
  52431. : heapBudget( heapBudget_ ), heapUsage( heapUsage_ )
  52432. {}
  52433. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52434. PhysicalDeviceMemoryBudgetPropertiesEXT( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  52435. : PhysicalDeviceMemoryBudgetPropertiesEXT( *reinterpret_cast<PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs ) )
  52436. {}
  52437. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52438. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( PhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52439. PhysicalDeviceMemoryBudgetPropertiesEXT & operator=( VkPhysicalDeviceMemoryBudgetPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  52440. {
  52441. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT const *>( &rhs );
  52442. return *this;
  52443. }
  52444. operator VkPhysicalDeviceMemoryBudgetPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
  52445. {
  52446. return *reinterpret_cast<const VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
  52447. }
  52448. operator VkPhysicalDeviceMemoryBudgetPropertiesEXT &() VULKAN_HPP_NOEXCEPT
  52449. {
  52450. return *reinterpret_cast<VkPhysicalDeviceMemoryBudgetPropertiesEXT*>( this );
  52451. }
  52452. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  52453. auto operator<=>( PhysicalDeviceMemoryBudgetPropertiesEXT const& ) const = default;
  52454. #else
  52455. bool operator==( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  52456. {
  52457. return ( sType == rhs.sType )
  52458. && ( pNext == rhs.pNext )
  52459. && ( heapBudget == rhs.heapBudget )
  52460. && ( heapUsage == rhs.heapUsage );
  52461. }
  52462. bool operator!=( PhysicalDeviceMemoryBudgetPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  52463. {
  52464. return !operator==( rhs );
  52465. }
  52466. #endif
  52467. public:
  52468. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT;
  52469. void* pNext = {};
  52470. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapBudget = {};
  52471. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::DeviceSize, VK_MAX_MEMORY_HEAPS> heapUsage = {};
  52472. };
  52473. static_assert( sizeof( PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ), "struct and wrapper have different size!" );
  52474. static_assert( std::is_standard_layout<PhysicalDeviceMemoryBudgetPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  52475. template <>
  52476. struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryBudgetPropertiesEXT>
  52477. {
  52478. using Type = PhysicalDeviceMemoryBudgetPropertiesEXT;
  52479. };
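// Usage sketch (illustrative only, not part of the generated interface): budget and usage are
// reported per memory heap through the pNext chain of PhysicalDeviceMemoryProperties2
// (VK_EXT_memory_budget). "physicalDevice" and the "vk" alias are assumptions.
//
//   vk::PhysicalDeviceMemoryBudgetPropertiesEXT memoryBudgetProperties;
//   vk::PhysicalDeviceMemoryProperties2 memoryProperties2;
//   memoryProperties2.pNext = &memoryBudgetProperties;
//   physicalDevice.getMemoryProperties2( &memoryProperties2 );
//
//   for ( uint32_t i = 0; i < memoryProperties2.memoryProperties.memoryHeapCount; ++i )
//   {
//     vk::DeviceSize remaining = memoryBudgetProperties.heapBudget[i] - memoryBudgetProperties.heapUsage[i];
//   }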
  52480. struct PhysicalDeviceMemoryPriorityFeaturesEXT
  52481. {
  52482. static const bool allowDuplicate = false;
  52483. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
  52484. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52485. VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ = {}) VULKAN_HPP_NOEXCEPT
  52486. : memoryPriority( memoryPriority_ )
  52487. {}
  52488. VULKAN_HPP_CONSTEXPR PhysicalDeviceMemoryPriorityFeaturesEXT( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52489. PhysicalDeviceMemoryPriorityFeaturesEXT( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  52490. : PhysicalDeviceMemoryPriorityFeaturesEXT( *reinterpret_cast<PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs ) )
  52491. {}
  52492. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52493. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( PhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52494. PhysicalDeviceMemoryPriorityFeaturesEXT & operator=( VkPhysicalDeviceMemoryPriorityFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  52495. {
  52496. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT const *>( &rhs );
  52497. return *this;
  52498. }
  52499. PhysicalDeviceMemoryPriorityFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  52500. {
  52501. pNext = pNext_;
  52502. return *this;
  52503. }
  52504. PhysicalDeviceMemoryPriorityFeaturesEXT & setMemoryPriority( VULKAN_HPP_NAMESPACE::Bool32 memoryPriority_ ) VULKAN_HPP_NOEXCEPT
  52505. {
  52506. memoryPriority = memoryPriority_;
  52507. return *this;
  52508. }
  52509. operator VkPhysicalDeviceMemoryPriorityFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  52510. {
  52511. return *reinterpret_cast<const VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
  52512. }
  52513. operator VkPhysicalDeviceMemoryPriorityFeaturesEXT &() VULKAN_HPP_NOEXCEPT
  52514. {
  52515. return *reinterpret_cast<VkPhysicalDeviceMemoryPriorityFeaturesEXT*>( this );
  52516. }
  52517. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  52518. auto operator<=>( PhysicalDeviceMemoryPriorityFeaturesEXT const& ) const = default;
  52519. #else
  52520. bool operator==( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  52521. {
  52522. return ( sType == rhs.sType )
  52523. && ( pNext == rhs.pNext )
  52524. && ( memoryPriority == rhs.memoryPriority );
  52525. }
  52526. bool operator!=( PhysicalDeviceMemoryPriorityFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  52527. {
  52528. return !operator==( rhs );
  52529. }
  52530. #endif
  52531. public:
  52532. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT;
  52533. void* pNext = {};
  52534. VULKAN_HPP_NAMESPACE::Bool32 memoryPriority = {};
  52535. };
  52536. static_assert( sizeof( PhysicalDeviceMemoryPriorityFeaturesEXT ) == sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ), "struct and wrapper have different size!" );
  52537. static_assert( std::is_standard_layout<PhysicalDeviceMemoryPriorityFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  52538. template <>
  52539. struct CppType<StructureType, StructureType::ePhysicalDeviceMemoryPriorityFeaturesEXT>
  52540. {
  52541. using Type = PhysicalDeviceMemoryPriorityFeaturesEXT;
  52542. };
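// Usage sketch (illustrative only, not part of the generated interface): with the feature enabled,
// a priority hint can be attached to an allocation via MemoryPriorityAllocateInfoEXT. "device",
// "size", "memoryTypeIndex" and the "vk" alias are assumptions from the caller.
//
//   vk::MemoryPriorityAllocateInfoEXT priorityInfo;
//   priorityInfo.priority = 1.0f;   // 0.0 = lowest, 1.0 = highest
//
//   vk::MemoryAllocateInfo allocateInfo( size, memoryTypeIndex );
//   allocateInfo.pNext = &priorityInfo;
//   vk::UniqueDeviceMemory memory = device.allocateMemoryUnique( allocateInfo );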
  52543. struct PhysicalDeviceMeshShaderFeaturesNV
  52544. {
  52545. static const bool allowDuplicate = false;
  52546. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
  52547. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52548. VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 taskShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 meshShader_ = {}) VULKAN_HPP_NOEXCEPT
  52549. : taskShader( taskShader_ ), meshShader( meshShader_ )
  52550. {}
  52551. VULKAN_HPP_CONSTEXPR PhysicalDeviceMeshShaderFeaturesNV( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52552. PhysicalDeviceMeshShaderFeaturesNV( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  52553. : PhysicalDeviceMeshShaderFeaturesNV( *reinterpret_cast<PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs ) )
  52554. {}
  52555. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52556. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderFeaturesNV & operator=( PhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52557. PhysicalDeviceMeshShaderFeaturesNV & operator=( VkPhysicalDeviceMeshShaderFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  52558. {
  52559. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV const *>( &rhs );
  52560. return *this;
  52561. }
  52562. PhysicalDeviceMeshShaderFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  52563. {
  52564. pNext = pNext_;
  52565. return *this;
  52566. }
  52567. PhysicalDeviceMeshShaderFeaturesNV & setTaskShader( VULKAN_HPP_NAMESPACE::Bool32 taskShader_ ) VULKAN_HPP_NOEXCEPT
  52568. {
  52569. taskShader = taskShader_;
  52570. return *this;
  52571. }
  52572. PhysicalDeviceMeshShaderFeaturesNV & setMeshShader( VULKAN_HPP_NAMESPACE::Bool32 meshShader_ ) VULKAN_HPP_NOEXCEPT
  52573. {
  52574. meshShader = meshShader_;
  52575. return *this;
  52576. }
  52577. operator VkPhysicalDeviceMeshShaderFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
  52578. {
  52579. return *reinterpret_cast<const VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
  52580. }
  52581. operator VkPhysicalDeviceMeshShaderFeaturesNV &() VULKAN_HPP_NOEXCEPT
  52582. {
  52583. return *reinterpret_cast<VkPhysicalDeviceMeshShaderFeaturesNV*>( this );
  52584. }
  52585. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  52586. auto operator<=>( PhysicalDeviceMeshShaderFeaturesNV const& ) const = default;
  52587. #else
  52588. bool operator==( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  52589. {
  52590. return ( sType == rhs.sType )
  52591. && ( pNext == rhs.pNext )
  52592. && ( taskShader == rhs.taskShader )
  52593. && ( meshShader == rhs.meshShader );
  52594. }
  52595. bool operator!=( PhysicalDeviceMeshShaderFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  52596. {
  52597. return !operator==( rhs );
  52598. }
  52599. #endif
  52600. public:
  52601. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderFeaturesNV;
  52602. void* pNext = {};
  52603. VULKAN_HPP_NAMESPACE::Bool32 taskShader = {};
  52604. VULKAN_HPP_NAMESPACE::Bool32 meshShader = {};
  52605. };
  52606. static_assert( sizeof( PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ), "struct and wrapper have different size!" );
  52607. static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  52608. template <>
  52609. struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderFeaturesNV>
  52610. {
  52611. using Type = PhysicalDeviceMeshShaderFeaturesNV;
  52612. };
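// Usage sketch (illustrative only, not part of the generated interface): with taskShader / meshShader
// enabled and VK_NV_mesh_shader active, mesh work is recorded with drawMeshTasksNV instead of a
// conventional draw (the extension's entry points must be loaded, e.g. via vk::DispatchLoaderDynamic).
// "deviceCreateInfo", "commandBuffer", "taskCount" and the "vk" alias are assumptions.
//
//   vk::PhysicalDeviceMeshShaderFeaturesNV meshShaderFeatures;
//   meshShaderFeatures.taskShader = VK_TRUE;
//   meshShaderFeatures.meshShader = VK_TRUE;
//   deviceCreateInfo.pNext = &meshShaderFeatures;
//   ...
//   commandBuffer.drawMeshTasksNV( taskCount, 0 );   // dispatched from the bound mesh pipeline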
  52613. struct PhysicalDeviceMeshShaderPropertiesNV
  52614. {
  52615. static const bool allowDuplicate = false;
  52616. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
  52617. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52618. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV(uint32_t maxDrawMeshTasksCount_ = {}, uint32_t maxTaskWorkGroupInvocations_ = {}, std::array<uint32_t,3> const& maxTaskWorkGroupSize_ = {}, uint32_t maxTaskTotalMemorySize_ = {}, uint32_t maxTaskOutputCount_ = {}, uint32_t maxMeshWorkGroupInvocations_ = {}, std::array<uint32_t,3> const& maxMeshWorkGroupSize_ = {}, uint32_t maxMeshTotalMemorySize_ = {}, uint32_t maxMeshOutputVertices_ = {}, uint32_t maxMeshOutputPrimitives_ = {}, uint32_t maxMeshMultiviewViewCount_ = {}, uint32_t meshOutputPerVertexGranularity_ = {}, uint32_t meshOutputPerPrimitiveGranularity_ = {}) VULKAN_HPP_NOEXCEPT
  52619. : maxDrawMeshTasksCount( maxDrawMeshTasksCount_ ), maxTaskWorkGroupInvocations( maxTaskWorkGroupInvocations_ ), maxTaskWorkGroupSize( maxTaskWorkGroupSize_ ), maxTaskTotalMemorySize( maxTaskTotalMemorySize_ ), maxTaskOutputCount( maxTaskOutputCount_ ), maxMeshWorkGroupInvocations( maxMeshWorkGroupInvocations_ ), maxMeshWorkGroupSize( maxMeshWorkGroupSize_ ), maxMeshTotalMemorySize( maxMeshTotalMemorySize_ ), maxMeshOutputVertices( maxMeshOutputVertices_ ), maxMeshOutputPrimitives( maxMeshOutputPrimitives_ ), maxMeshMultiviewViewCount( maxMeshMultiviewViewCount_ ), meshOutputPerVertexGranularity( meshOutputPerVertexGranularity_ ), meshOutputPerPrimitiveGranularity( meshOutputPerPrimitiveGranularity_ )
  52620. {}
  52621. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52622. PhysicalDeviceMeshShaderPropertiesNV( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  52623. : PhysicalDeviceMeshShaderPropertiesNV( *reinterpret_cast<PhysicalDeviceMeshShaderPropertiesNV const *>( &rhs ) )
  52624. {}
  52625. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52626. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMeshShaderPropertiesNV & operator=( PhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  52627. PhysicalDeviceMeshShaderPropertiesNV & operator=( VkPhysicalDeviceMeshShaderPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  52628. {
  52629. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV const *>( &rhs );
  52630. return *this;
  52631. }
  52632. operator VkPhysicalDeviceMeshShaderPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
  52633. {
  52634. return *reinterpret_cast<const VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
  52635. }
  52636. operator VkPhysicalDeviceMeshShaderPropertiesNV &() VULKAN_HPP_NOEXCEPT
  52637. {
  52638. return *reinterpret_cast<VkPhysicalDeviceMeshShaderPropertiesNV*>( this );
  52639. }
  52640. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  52641. auto operator<=>( PhysicalDeviceMeshShaderPropertiesNV const& ) const = default;
  52642. #else
  52643. bool operator==( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  52644. {
  52645. return ( sType == rhs.sType )
  52646. && ( pNext == rhs.pNext )
  52647. && ( maxDrawMeshTasksCount == rhs.maxDrawMeshTasksCount )
  52648. && ( maxTaskWorkGroupInvocations == rhs.maxTaskWorkGroupInvocations )
  52649. && ( maxTaskWorkGroupSize == rhs.maxTaskWorkGroupSize )
  52650. && ( maxTaskTotalMemorySize == rhs.maxTaskTotalMemorySize )
  52651. && ( maxTaskOutputCount == rhs.maxTaskOutputCount )
  52652. && ( maxMeshWorkGroupInvocations == rhs.maxMeshWorkGroupInvocations )
  52653. && ( maxMeshWorkGroupSize == rhs.maxMeshWorkGroupSize )
  52654. && ( maxMeshTotalMemorySize == rhs.maxMeshTotalMemorySize )
  52655. && ( maxMeshOutputVertices == rhs.maxMeshOutputVertices )
  52656. && ( maxMeshOutputPrimitives == rhs.maxMeshOutputPrimitives )
  52657. && ( maxMeshMultiviewViewCount == rhs.maxMeshMultiviewViewCount )
  52658. && ( meshOutputPerVertexGranularity == rhs.meshOutputPerVertexGranularity )
  52659. && ( meshOutputPerPrimitiveGranularity == rhs.meshOutputPerPrimitiveGranularity );
  52660. }
  52661. bool operator!=( PhysicalDeviceMeshShaderPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  52662. {
  52663. return !operator==( rhs );
  52664. }
  52665. #endif
  52666. public:
  52667. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMeshShaderPropertiesNV;
  52668. void* pNext = {};
  52669. uint32_t maxDrawMeshTasksCount = {};
  52670. uint32_t maxTaskWorkGroupInvocations = {};
  52671. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxTaskWorkGroupSize = {};
  52672. uint32_t maxTaskTotalMemorySize = {};
  52673. uint32_t maxTaskOutputCount = {};
  52674. uint32_t maxMeshWorkGroupInvocations = {};
  52675. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> maxMeshWorkGroupSize = {};
  52676. uint32_t maxMeshTotalMemorySize = {};
  52677. uint32_t maxMeshOutputVertices = {};
  52678. uint32_t maxMeshOutputPrimitives = {};
  52679. uint32_t maxMeshMultiviewViewCount = {};
  52680. uint32_t meshOutputPerVertexGranularity = {};
  52681. uint32_t meshOutputPerPrimitiveGranularity = {};
  52682. };
  52683. static_assert( sizeof( PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ), "struct and wrapper have different size!" );
  52684. static_assert( std::is_standard_layout<PhysicalDeviceMeshShaderPropertiesNV>::value, "struct wrapper is not a standard layout!" );
  52685. template <>
  52686. struct CppType<StructureType, StructureType::ePhysicalDeviceMeshShaderPropertiesNV>
  52687. {
  52688. using Type = PhysicalDeviceMeshShaderPropertiesNV;
  52689. };
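// Usage sketch (illustrative only, not part of the generated interface): the NV mesh shading limits
// cap, among other things, how many tasks one drawMeshTasksNV call may launch. "physicalDevice" and
// the "vk" alias are assumptions.
//
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceMeshShaderPropertiesNV>();
//   uint32_t maxTasks = chain.get<vk::PhysicalDeviceMeshShaderPropertiesNV>().maxDrawMeshTasksCount;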
  52690. struct PhysicalDeviceMultiviewFeatures
  52691. {
  52692. static const bool allowDuplicate = false;
  52693. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewFeatures;
  52694. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  52695. VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures(VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}) VULKAN_HPP_NOEXCEPT
  52696. : multiview( multiview_ ), multiviewGeometryShader( multiviewGeometryShader_ ), multiviewTessellationShader( multiviewTessellationShader_ )
  52697. {}
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewFeatures( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDeviceMultiviewFeatures( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMultiviewFeatures( *reinterpret_cast<PhysicalDeviceMultiviewFeatures const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewFeatures & operator=( PhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDeviceMultiviewFeatures & operator=( VkPhysicalDeviceMultiviewFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures const *>( &rhs );
      return *this;
    }
    PhysicalDeviceMultiviewFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    PhysicalDeviceMultiviewFeatures & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
    {
      multiview = multiview_;
      return *this;
    }
    PhysicalDeviceMultiviewFeatures & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
    {
      multiviewGeometryShader = multiviewGeometryShader_;
      return *this;
    }
    PhysicalDeviceMultiviewFeatures & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
    {
      multiviewTessellationShader = multiviewTessellationShader_;
      return *this;
    }
    operator VkPhysicalDeviceMultiviewFeatures const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMultiviewFeatures*>( this );
    }
    operator VkPhysicalDeviceMultiviewFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMultiviewFeatures*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceMultiviewFeatures const& ) const = default;
#else
    bool operator==( PhysicalDeviceMultiviewFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( multiview == rhs.multiview )
          && ( multiviewGeometryShader == rhs.multiviewGeometryShader )
          && ( multiviewTessellationShader == rhs.multiviewTessellationShader );
    }
    bool operator!=( PhysicalDeviceMultiviewFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewFeatures;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
    VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
    VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
  };
  static_assert( sizeof( PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewFeatures>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewFeatures>
  {
    using Type = PhysicalDeviceMultiviewFeatures;
  };
  using PhysicalDeviceMultiviewFeaturesKHR = PhysicalDeviceMultiviewFeatures;
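  /* Illustrative usage sketch (not part of the generated header). Assuming a valid
     vk::PhysicalDevice `physicalDevice`, the multiview feature bits are typically queried by
     chaining this struct behind PhysicalDeviceFeatures2, and the same struct can later be
     chained into DeviceCreateInfo::pNext to enable the features:

       vk::PhysicalDeviceMultiviewFeatures multiviewFeatures;
       vk::PhysicalDeviceFeatures2 features2;
       features2.pNext = &multiviewFeatures;
       physicalDevice.getFeatures2( &features2 );
       if ( multiviewFeatures.multiview )
       {
         // chain &multiviewFeatures into vk::DeviceCreateInfo::pNext when creating the device
       }
  */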
  struct PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX(VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents_ = {}) VULKAN_HPP_NOEXCEPT
      : perViewPositionAllComponents( perViewPositionAllComponents_ )
    {}
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX( *reinterpret_cast<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX & operator=( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const *>( &rhs );
      return *this;
    }
    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
    }
    operator VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& ) const = default;
#else
    bool operator==( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( perViewPositionAllComponents == rhs.perViewPositionAllComponents );
    }
    bool operator!=( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 perViewPositionAllComponents = {};
  };
  static_assert( sizeof( PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) == sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>
  {
    using Type = PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
  };
  struct PhysicalDeviceMultiviewProperties
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMultiviewProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties(uint32_t maxMultiviewViewCount_ = {}, uint32_t maxMultiviewInstanceIndex_ = {}) VULKAN_HPP_NOEXCEPT
      : maxMultiviewViewCount( maxMultiviewViewCount_ ), maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ )
    {}
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMultiviewProperties( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDeviceMultiviewProperties( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMultiviewProperties( *reinterpret_cast<PhysicalDeviceMultiviewProperties const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMultiviewProperties & operator=( PhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDeviceMultiviewProperties & operator=( VkPhysicalDeviceMultiviewProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties const *>( &rhs );
      return *this;
    }
    operator VkPhysicalDeviceMultiviewProperties const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMultiviewProperties*>( this );
    }
    operator VkPhysicalDeviceMultiviewProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMultiviewProperties*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceMultiviewProperties const& ) const = default;
#else
    bool operator==( PhysicalDeviceMultiviewProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( maxMultiviewViewCount == rhs.maxMultiviewViewCount )
          && ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex );
    }
    bool operator!=( PhysicalDeviceMultiviewProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMultiviewProperties;
    void* pNext = {};
    uint32_t maxMultiviewViewCount = {};
    uint32_t maxMultiviewInstanceIndex = {};
  };
  static_assert( sizeof( PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceMultiviewProperties>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMultiviewProperties>
  {
    using Type = PhysicalDeviceMultiviewProperties;
  };
  using PhysicalDeviceMultiviewPropertiesKHR = PhysicalDeviceMultiviewProperties;
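  /* Illustrative usage sketch (not part of the generated header). The multiview limits are
     read-only and filled in by the implementation when this struct is chained behind
     PhysicalDeviceProperties2, e.g. via the StructureChain overload (assuming a valid
     vk::PhysicalDevice `physicalDevice`):

       auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
                                                  vk::PhysicalDeviceMultiviewProperties>();
       auto const & multiviewProps = chain.get<vk::PhysicalDeviceMultiviewProperties>();
       uint32_t maxViews = multiviewProps.maxMultiviewViewCount;
  */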
  struct PhysicalDeviceMutableDescriptorTypeFeaturesVALVE
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesVALVE(VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ = {}) VULKAN_HPP_NOEXCEPT
      : mutableDescriptorType( mutableDescriptorType_ )
    {}
    VULKAN_HPP_CONSTEXPR PhysicalDeviceMutableDescriptorTypeFeaturesVALVE( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDeviceMutableDescriptorTypeFeaturesVALVE( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceMutableDescriptorTypeFeaturesVALVE( *reinterpret_cast<PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & operator=( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & operator=( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const *>( &rhs );
      return *this;
    }
    PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    PhysicalDeviceMutableDescriptorTypeFeaturesVALVE & setMutableDescriptorType( VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType_ ) VULKAN_HPP_NOEXCEPT
    {
      mutableDescriptorType = mutableDescriptorType_;
      return *this;
    }
    operator VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE*>( this );
    }
    operator VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const& ) const = default;
#else
    bool operator==( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( mutableDescriptorType == rhs.mutableDescriptorType );
    }
    bool operator!=( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 mutableDescriptorType = {};
  };
  static_assert( sizeof( PhysicalDeviceMutableDescriptorTypeFeaturesVALVE ) == sizeof( VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceMutableDescriptorTypeFeaturesVALVE>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE>
  {
    using Type = PhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
  };
  struct PhysicalDevicePCIBusInfoPropertiesEXT
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT(uint32_t pciDomain_ = {}, uint32_t pciBus_ = {}, uint32_t pciDevice_ = {}, uint32_t pciFunction_ = {}) VULKAN_HPP_NOEXCEPT
      : pciDomain( pciDomain_ ), pciBus( pciBus_ ), pciDevice( pciDevice_ ), pciFunction( pciFunction_ )
    {}
    VULKAN_HPP_CONSTEXPR PhysicalDevicePCIBusInfoPropertiesEXT( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDevicePCIBusInfoPropertiesEXT( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePCIBusInfoPropertiesEXT( *reinterpret_cast<PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePCIBusInfoPropertiesEXT & operator=( PhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDevicePCIBusInfoPropertiesEXT & operator=( VkPhysicalDevicePCIBusInfoPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT const *>( &rhs );
      return *this;
    }
    operator VkPhysicalDevicePCIBusInfoPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
    }
    operator VkPhysicalDevicePCIBusInfoPropertiesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePCIBusInfoPropertiesEXT*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDevicePCIBusInfoPropertiesEXT const& ) const = default;
#else
    bool operator==( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( pciDomain == rhs.pciDomain )
          && ( pciBus == rhs.pciBus )
          && ( pciDevice == rhs.pciDevice )
          && ( pciFunction == rhs.pciFunction );
    }
    bool operator!=( PhysicalDevicePCIBusInfoPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePciBusInfoPropertiesEXT;
    void* pNext = {};
    uint32_t pciDomain = {};
    uint32_t pciBus = {};
    uint32_t pciDevice = {};
    uint32_t pciFunction = {};
  };
  static_assert( sizeof( PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePCIBusInfoPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePciBusInfoPropertiesEXT>
  {
    using Type = PhysicalDevicePCIBusInfoPropertiesEXT;
  };
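  /* Illustrative usage sketch (not part of the generated header). When the VK_EXT_pci_bus_info
     device extension is available, the adapter's PCI address can be read the same way as other
     properties structs (assuming a valid vk::PhysicalDevice `physicalDevice`):

       vk::PhysicalDevicePCIBusInfoPropertiesEXT pciInfo;
       vk::PhysicalDeviceProperties2 props2;
       props2.pNext = &pciInfo;
       physicalDevice.getProperties2( &props2 );
       // pciInfo.pciDomain / pciBus / pciDevice / pciFunction now identify the adapter
  */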
  struct PhysicalDevicePerformanceQueryFeaturesKHR
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ = {}) VULKAN_HPP_NOEXCEPT
      : performanceCounterQueryPools( performanceCounterQueryPools_ ), performanceCounterMultipleQueryPools( performanceCounterMultipleQueryPools_ )
    {}
    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryFeaturesKHR( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDevicePerformanceQueryFeaturesKHR( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePerformanceQueryFeaturesKHR( *reinterpret_cast<PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryFeaturesKHR & operator=( PhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDevicePerformanceQueryFeaturesKHR & operator=( VkPhysicalDevicePerformanceQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR const *>( &rhs );
      return *this;
    }
    PhysicalDevicePerformanceQueryFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools_ ) VULKAN_HPP_NOEXCEPT
    {
      performanceCounterQueryPools = performanceCounterQueryPools_;
      return *this;
    }
    PhysicalDevicePerformanceQueryFeaturesKHR & setPerformanceCounterMultipleQueryPools( VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools_ ) VULKAN_HPP_NOEXCEPT
    {
      performanceCounterMultipleQueryPools = performanceCounterMultipleQueryPools_;
      return *this;
    }
    operator VkPhysicalDevicePerformanceQueryFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
    }
    operator VkPhysicalDevicePerformanceQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePerformanceQueryFeaturesKHR*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDevicePerformanceQueryFeaturesKHR const& ) const = default;
#else
    bool operator==( PhysicalDevicePerformanceQueryFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( performanceCounterQueryPools == rhs.performanceCounterQueryPools )
          && ( performanceCounterMultipleQueryPools == rhs.performanceCounterMultipleQueryPools );
    }
    bool operator!=( PhysicalDevicePerformanceQueryFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 performanceCounterQueryPools = {};
    VULKAN_HPP_NAMESPACE::Bool32 performanceCounterMultipleQueryPools = {};
  };
  static_assert( sizeof( PhysicalDevicePerformanceQueryFeaturesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePerformanceQueryFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryFeaturesKHR>
  {
    using Type = PhysicalDevicePerformanceQueryFeaturesKHR;
  };
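  /* Illustrative usage sketch (not part of the generated header). The builder-style setters let
     the struct be filled in one expression before it is chained into DeviceCreateInfo::pNext to
     enable performance-query support on a device that advertises VK_KHR_performance_query:

       auto perfQueryFeatures = vk::PhysicalDevicePerformanceQueryFeaturesKHR()
                                  .setPerformanceCounterQueryPools( VK_TRUE );
       // chain &perfQueryFeatures into vk::DeviceCreateInfo::pNext before creating the device
  */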
  struct PhysicalDevicePerformanceQueryPropertiesKHR
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR(VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies_ = {}) VULKAN_HPP_NOEXCEPT
      : allowCommandBufferQueryCopies( allowCommandBufferQueryCopies_ )
    {}
    VULKAN_HPP_CONSTEXPR PhysicalDevicePerformanceQueryPropertiesKHR( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDevicePerformanceQueryPropertiesKHR( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePerformanceQueryPropertiesKHR( *reinterpret_cast<PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePerformanceQueryPropertiesKHR & operator=( PhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDevicePerformanceQueryPropertiesKHR & operator=( VkPhysicalDevicePerformanceQueryPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR const *>( &rhs );
      return *this;
    }
    operator VkPhysicalDevicePerformanceQueryPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
    }
    operator VkPhysicalDevicePerformanceQueryPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePerformanceQueryPropertiesKHR*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDevicePerformanceQueryPropertiesKHR const& ) const = default;
#else
    bool operator==( PhysicalDevicePerformanceQueryPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( allowCommandBufferQueryCopies == rhs.allowCommandBufferQueryCopies );
    }
    bool operator!=( PhysicalDevicePerformanceQueryPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 allowCommandBufferQueryCopies = {};
  };
  static_assert( sizeof( PhysicalDevicePerformanceQueryPropertiesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePerformanceQueryPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePerformanceQueryPropertiesKHR>
  {
    using Type = PhysicalDevicePerformanceQueryPropertiesKHR;
  };
  struct PhysicalDevicePipelineCreationCacheControlFeaturesEXT
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ = {}) VULKAN_HPP_NOEXCEPT
      : pipelineCreationCacheControl( pipelineCreationCacheControl_ )
    {}
    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineCreationCacheControlFeaturesEXT( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDevicePipelineCreationCacheControlFeaturesEXT( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePipelineCreationCacheControlFeaturesEXT( *reinterpret_cast<PhysicalDevicePipelineCreationCacheControlFeaturesEXT const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineCreationCacheControlFeaturesEXT & operator=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDevicePipelineCreationCacheControlFeaturesEXT & operator=( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeaturesEXT const *>( &rhs );
      return *this;
    }
    PhysicalDevicePipelineCreationCacheControlFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    PhysicalDevicePipelineCreationCacheControlFeaturesEXT & setPipelineCreationCacheControl( VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineCreationCacheControl = pipelineCreationCacheControl_;
      return *this;
    }
    operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT*>( this );
    }
    operator VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& ) const = default;
#else
    bool operator==( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( pipelineCreationCacheControl == rhs.pipelineCreationCacheControl );
    }
    bool operator!=( PhysicalDevicePipelineCreationCacheControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 pipelineCreationCacheControl = {};
  };
  static_assert( sizeof( PhysicalDevicePipelineCreationCacheControlFeaturesEXT ) == sizeof( VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePipelineCreationCacheControlFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineCreationCacheControlFeaturesEXT>
  {
    using Type = PhysicalDevicePipelineCreationCacheControlFeaturesEXT;
  };
  struct PhysicalDevicePipelineExecutablePropertiesFeaturesKHR
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ = {}) VULKAN_HPP_NOEXCEPT
      : pipelineExecutableInfo( pipelineExecutableInfo_ )
    {}
    VULKAN_HPP_CONSTEXPR PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePipelineExecutablePropertiesFeaturesKHR( *reinterpret_cast<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & operator=( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const *>( &rhs );
      return *this;
    }
    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    PhysicalDevicePipelineExecutablePropertiesFeaturesKHR & setPipelineExecutableInfo( VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineExecutableInfo = pipelineExecutableInfo_;
      return *this;
    }
    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
    }
    operator VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& ) const = default;
#else
    bool operator==( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( pipelineExecutableInfo == rhs.pipelineExecutableInfo );
    }
    bool operator!=( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 pipelineExecutableInfo = {};
  };
  static_assert( sizeof( PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) == sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR>
  {
    using Type = PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
  };
  struct PhysicalDevicePointClippingProperties
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePointClippingProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties(VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes) VULKAN_HPP_NOEXCEPT
      : pointClippingBehavior( pointClippingBehavior_ )
    {}
    VULKAN_HPP_CONSTEXPR PhysicalDevicePointClippingProperties( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDevicePointClippingProperties( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePointClippingProperties( *reinterpret_cast<PhysicalDevicePointClippingProperties const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePointClippingProperties & operator=( PhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDevicePointClippingProperties & operator=( VkPhysicalDevicePointClippingProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties const *>( &rhs );
      return *this;
    }
    operator VkPhysicalDevicePointClippingProperties const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePointClippingProperties*>( this );
    }
    operator VkPhysicalDevicePointClippingProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePointClippingProperties*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDevicePointClippingProperties const& ) const = default;
#else
    bool operator==( PhysicalDevicePointClippingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( pointClippingBehavior == rhs.pointClippingBehavior );
    }
    bool operator!=( PhysicalDevicePointClippingProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePointClippingProperties;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
  };
  static_assert( sizeof( PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePointClippingProperties>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePointClippingProperties>
  {
    using Type = PhysicalDevicePointClippingProperties;
  };
  using PhysicalDevicePointClippingPropertiesKHR = PhysicalDevicePointClippingProperties;
#ifdef VK_ENABLE_BETA_EXTENSIONS
  struct PhysicalDevicePortabilitySubsetFeaturesKHR
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ = {}, VULKAN_HPP_NAMESPACE::Bool32 events_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ = {}, VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ = {}, VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ = {}, VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ = {}, VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ = {}) VULKAN_HPP_NOEXCEPT
      : constantAlphaColorBlendFactors( constantAlphaColorBlendFactors_ ), events( events_ ), imageViewFormatReinterpretation( imageViewFormatReinterpretation_ ), imageViewFormatSwizzle( imageViewFormatSwizzle_ ), imageView2DOn3DImage( imageView2DOn3DImage_ ), multisampleArrayImage( multisampleArrayImage_ ), mutableComparisonSamplers( mutableComparisonSamplers_ ), pointPolygons( pointPolygons_ ), samplerMipLodBias( samplerMipLodBias_ ), separateStencilMaskRef( separateStencilMaskRef_ ), shaderSampleRateInterpolationFunctions( shaderSampleRateInterpolationFunctions_ ), tessellationIsolines( tessellationIsolines_ ), tessellationPointMode( tessellationPointMode_ ), triangleFans( triangleFans_ ), vertexAttributeAccessBeyondStride( vertexAttributeAccessBeyondStride_ )
    {}
    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetFeaturesKHR( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDevicePortabilitySubsetFeaturesKHR( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePortabilitySubsetFeaturesKHR( *reinterpret_cast<PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( PhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDevicePortabilitySubsetFeaturesKHR & operator=( VkPhysicalDevicePortabilitySubsetFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR const *>( &rhs );
      return *this;
    }
    PhysicalDevicePortabilitySubsetFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    PhysicalDevicePortabilitySubsetFeaturesKHR & setConstantAlphaColorBlendFactors( VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors_ ) VULKAN_HPP_NOEXCEPT
    {
      constantAlphaColorBlendFactors = constantAlphaColorBlendFactors_;
      return *this;
    }
    PhysicalDevicePortabilitySubsetFeaturesKHR & setEvents( VULKAN_HPP_NAMESPACE::Bool32 events_ ) VULKAN_HPP_NOEXCEPT
    {
      events = events_;
      return *this;
    }
    PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatReinterpretation( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation_ ) VULKAN_HPP_NOEXCEPT
    {
      imageViewFormatReinterpretation = imageViewFormatReinterpretation_;
      return *this;
    }
    PhysicalDevicePortabilitySubsetFeaturesKHR & setImageViewFormatSwizzle( VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle_ ) VULKAN_HPP_NOEXCEPT
    {
      imageViewFormatSwizzle = imageViewFormatSwizzle_;
      return *this;
    }
    PhysicalDevicePortabilitySubsetFeaturesKHR & setImageView2DOn3DImage( VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage_ ) VULKAN_HPP_NOEXCEPT
    {
      imageView2DOn3DImage = imageView2DOn3DImage_;
      return *this;
    }
    PhysicalDevicePortabilitySubsetFeaturesKHR & setMultisampleArrayImage( VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage_ ) VULKAN_HPP_NOEXCEPT
    {
      multisampleArrayImage = multisampleArrayImage_;
      return *this;
    }
    PhysicalDevicePortabilitySubsetFeaturesKHR & setMutableComparisonSamplers( VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers_ ) VULKAN_HPP_NOEXCEPT
    {
      mutableComparisonSamplers = mutableComparisonSamplers_;
      return *this;
    }
    PhysicalDevicePortabilitySubsetFeaturesKHR & setPointPolygons( VULKAN_HPP_NAMESPACE::Bool32 pointPolygons_ ) VULKAN_HPP_NOEXCEPT
    {
      pointPolygons = pointPolygons_;
      return *this;
    }
    PhysicalDevicePortabilitySubsetFeaturesKHR & setSamplerMipLodBias( VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias_ ) VULKAN_HPP_NOEXCEPT
    {
      samplerMipLodBias = samplerMipLodBias_;
      return *this;
    }
    PhysicalDevicePortabilitySubsetFeaturesKHR & setSeparateStencilMaskRef( VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef_ ) VULKAN_HPP_NOEXCEPT
    {
      separateStencilMaskRef = separateStencilMaskRef_;
      return *this;
    }
    PhysicalDevicePortabilitySubsetFeaturesKHR & setShaderSampleRateInterpolationFunctions( VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions_ ) VULKAN_HPP_NOEXCEPT
    {
      shaderSampleRateInterpolationFunctions = shaderSampleRateInterpolationFunctions_;
      return *this;
    }
    PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationIsolines( VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines_ ) VULKAN_HPP_NOEXCEPT
    {
      tessellationIsolines = tessellationIsolines_;
      return *this;
    }
    PhysicalDevicePortabilitySubsetFeaturesKHR & setTessellationPointMode( VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode_ ) VULKAN_HPP_NOEXCEPT
    {
      tessellationPointMode = tessellationPointMode_;
      return *this;
    }
    PhysicalDevicePortabilitySubsetFeaturesKHR & setTriangleFans( VULKAN_HPP_NAMESPACE::Bool32 triangleFans_ ) VULKAN_HPP_NOEXCEPT
    {
      triangleFans = triangleFans_;
      return *this;
    }
    PhysicalDevicePortabilitySubsetFeaturesKHR & setVertexAttributeAccessBeyondStride( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride_ ) VULKAN_HPP_NOEXCEPT
    {
      vertexAttributeAccessBeyondStride = vertexAttributeAccessBeyondStride_;
      return *this;
    }
    operator VkPhysicalDevicePortabilitySubsetFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetFeaturesKHR*>( this );
    }
    operator VkPhysicalDevicePortabilitySubsetFeaturesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetFeaturesKHR*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDevicePortabilitySubsetFeaturesKHR const& ) const = default;
#else
    bool operator==( PhysicalDevicePortabilitySubsetFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( constantAlphaColorBlendFactors == rhs.constantAlphaColorBlendFactors )
          && ( events == rhs.events )
          && ( imageViewFormatReinterpretation == rhs.imageViewFormatReinterpretation )
          && ( imageViewFormatSwizzle == rhs.imageViewFormatSwizzle )
          && ( imageView2DOn3DImage == rhs.imageView2DOn3DImage )
          && ( multisampleArrayImage == rhs.multisampleArrayImage )
          && ( mutableComparisonSamplers == rhs.mutableComparisonSamplers )
          && ( pointPolygons == rhs.pointPolygons )
          && ( samplerMipLodBias == rhs.samplerMipLodBias )
          && ( separateStencilMaskRef == rhs.separateStencilMaskRef )
          && ( shaderSampleRateInterpolationFunctions == rhs.shaderSampleRateInterpolationFunctions )
          && ( tessellationIsolines == rhs.tessellationIsolines )
          && ( tessellationPointMode == rhs.tessellationPointMode )
          && ( triangleFans == rhs.triangleFans )
          && ( vertexAttributeAccessBeyondStride == rhs.vertexAttributeAccessBeyondStride );
    }
    bool operator!=( PhysicalDevicePortabilitySubsetFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 constantAlphaColorBlendFactors = {};
    VULKAN_HPP_NAMESPACE::Bool32 events = {};
    VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatReinterpretation = {};
    VULKAN_HPP_NAMESPACE::Bool32 imageViewFormatSwizzle = {};
    VULKAN_HPP_NAMESPACE::Bool32 imageView2DOn3DImage = {};
    VULKAN_HPP_NAMESPACE::Bool32 multisampleArrayImage = {};
    VULKAN_HPP_NAMESPACE::Bool32 mutableComparisonSamplers = {};
    VULKAN_HPP_NAMESPACE::Bool32 pointPolygons = {};
    VULKAN_HPP_NAMESPACE::Bool32 samplerMipLodBias = {};
    VULKAN_HPP_NAMESPACE::Bool32 separateStencilMaskRef = {};
    VULKAN_HPP_NAMESPACE::Bool32 shaderSampleRateInterpolationFunctions = {};
    VULKAN_HPP_NAMESPACE::Bool32 tessellationIsolines = {};
    VULKAN_HPP_NAMESPACE::Bool32 tessellationPointMode = {};
    VULKAN_HPP_NAMESPACE::Bool32 triangleFans = {};
    VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeAccessBeyondStride = {};
  };
  static_assert( sizeof( PhysicalDevicePortabilitySubsetFeaturesKHR ) == sizeof( VkPhysicalDevicePortabilitySubsetFeaturesKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePortabilitySubsetFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetFeaturesKHR>
  {
    using Type = PhysicalDevicePortabilitySubsetFeaturesKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
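  /* Illustrative usage sketch (not part of the generated header). On implementations that expose
     VK_KHR_portability_subset, the struct above reports which otherwise-mandatory features are
     actually available; a typical pattern is to query it behind PhysicalDeviceFeatures2 and then
     chain the same struct into DeviceCreateInfo::pNext (assuming a valid vk::PhysicalDevice
     `physicalDevice`):

       vk::PhysicalDevicePortabilitySubsetFeaturesKHR portabilityFeatures;
       vk::PhysicalDeviceFeatures2 features2;
       features2.pNext = &portabilityFeatures;
       physicalDevice.getFeatures2( &features2 );
       if ( !portabilityFeatures.triangleFans )
       {
         // avoid VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN on this device
       }
  */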
#ifdef VK_ENABLE_BETA_EXTENSIONS
  struct PhysicalDevicePortabilitySubsetPropertiesKHR
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR(uint32_t minVertexInputBindingStrideAlignment_ = {}) VULKAN_HPP_NOEXCEPT
      : minVertexInputBindingStrideAlignment( minVertexInputBindingStrideAlignment_ )
    {}
    VULKAN_HPP_CONSTEXPR PhysicalDevicePortabilitySubsetPropertiesKHR( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDevicePortabilitySubsetPropertiesKHR( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePortabilitySubsetPropertiesKHR( *reinterpret_cast<PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( PhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDevicePortabilitySubsetPropertiesKHR & operator=( VkPhysicalDevicePortabilitySubsetPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR const *>( &rhs );
      return *this;
    }
    PhysicalDevicePortabilitySubsetPropertiesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    PhysicalDevicePortabilitySubsetPropertiesKHR & setMinVertexInputBindingStrideAlignment( uint32_t minVertexInputBindingStrideAlignment_ ) VULKAN_HPP_NOEXCEPT
    {
      minVertexInputBindingStrideAlignment = minVertexInputBindingStrideAlignment_;
      return *this;
    }
    operator VkPhysicalDevicePortabilitySubsetPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePortabilitySubsetPropertiesKHR*>( this );
    }
    operator VkPhysicalDevicePortabilitySubsetPropertiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePortabilitySubsetPropertiesKHR*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDevicePortabilitySubsetPropertiesKHR const& ) const = default;
#else
    bool operator==( PhysicalDevicePortabilitySubsetPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( minVertexInputBindingStrideAlignment == rhs.minVertexInputBindingStrideAlignment );
    }
    bool operator!=( PhysicalDevicePortabilitySubsetPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR;
    void* pNext = {};
    uint32_t minVertexInputBindingStrideAlignment = {};
  };
  static_assert( sizeof( PhysicalDevicePortabilitySubsetPropertiesKHR ) == sizeof( VkPhysicalDevicePortabilitySubsetPropertiesKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePortabilitySubsetPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePortabilitySubsetPropertiesKHR>
  {
    using Type = PhysicalDevicePortabilitySubsetPropertiesKHR;
  };
#endif /*VK_ENABLE_BETA_EXTENSIONS*/
  struct PhysicalDevicePrivateDataFeaturesEXT
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePrivateDataFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 privateData_ = {}) VULKAN_HPP_NOEXCEPT
      : privateData( privateData_ )
    {}
    VULKAN_HPP_CONSTEXPR PhysicalDevicePrivateDataFeaturesEXT( PhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDevicePrivateDataFeaturesEXT( VkPhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDevicePrivateDataFeaturesEXT( *reinterpret_cast<PhysicalDevicePrivateDataFeaturesEXT const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePrivateDataFeaturesEXT & operator=( PhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDevicePrivateDataFeaturesEXT & operator=( VkPhysicalDevicePrivateDataFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeaturesEXT const *>( &rhs );
      return *this;
    }
    PhysicalDevicePrivateDataFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    PhysicalDevicePrivateDataFeaturesEXT & setPrivateData( VULKAN_HPP_NAMESPACE::Bool32 privateData_ ) VULKAN_HPP_NOEXCEPT
    {
      privateData = privateData_;
      return *this;
    }
    operator VkPhysicalDevicePrivateDataFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDevicePrivateDataFeaturesEXT*>( this );
    }
    operator VkPhysicalDevicePrivateDataFeaturesEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDevicePrivateDataFeaturesEXT*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDevicePrivateDataFeaturesEXT const& ) const = default;
#else
    bool operator==( PhysicalDevicePrivateDataFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( privateData == rhs.privateData );
    }
    bool operator!=( PhysicalDevicePrivateDataFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePrivateDataFeaturesEXT;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 privateData = {};
  };
  static_assert( sizeof( PhysicalDevicePrivateDataFeaturesEXT ) == sizeof( VkPhysicalDevicePrivateDataFeaturesEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDevicePrivateDataFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDevicePrivateDataFeaturesEXT>
  {
    using Type = PhysicalDevicePrivateDataFeaturesEXT;
  };
  struct PhysicalDeviceProtectedMemoryFeatures
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures(VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}) VULKAN_HPP_NOEXCEPT
      : protectedMemory( protectedMemory_ )
    {}
    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryFeatures( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDeviceProtectedMemoryFeatures( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceProtectedMemoryFeatures( *reinterpret_cast<PhysicalDeviceProtectedMemoryFeatures const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryFeatures & operator=( PhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDeviceProtectedMemoryFeatures & operator=( VkPhysicalDeviceProtectedMemoryFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures const *>( &rhs );
      return *this;
    }
    PhysicalDeviceProtectedMemoryFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    PhysicalDeviceProtectedMemoryFeatures & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
    {
      protectedMemory = protectedMemory_;
      return *this;
    }
    operator VkPhysicalDeviceProtectedMemoryFeatures const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryFeatures*>( this );
    }
    operator VkPhysicalDeviceProtectedMemoryFeatures &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryFeatures*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceProtectedMemoryFeatures const& ) const = default;
#else
    bool operator==( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( protectedMemory == rhs.protectedMemory );
    }
    bool operator!=( PhysicalDeviceProtectedMemoryFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryFeatures;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
  };
  static_assert( sizeof( PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryFeatures>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryFeatures>
  {
    using Type = PhysicalDeviceProtectedMemoryFeatures;
  };
  struct PhysicalDeviceProtectedMemoryProperties
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties(VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}) VULKAN_HPP_NOEXCEPT
      : protectedNoFault( protectedNoFault_ )
    {}
    VULKAN_HPP_CONSTEXPR PhysicalDeviceProtectedMemoryProperties( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDeviceProtectedMemoryProperties( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : PhysicalDeviceProtectedMemoryProperties( *reinterpret_cast<PhysicalDeviceProtectedMemoryProperties const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceProtectedMemoryProperties & operator=( PhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PhysicalDeviceProtectedMemoryProperties & operator=( VkPhysicalDeviceProtectedMemoryProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties const *>( &rhs );
      return *this;
    }
    operator VkPhysicalDeviceProtectedMemoryProperties const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPhysicalDeviceProtectedMemoryProperties*>( this );
    }
    operator VkPhysicalDeviceProtectedMemoryProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPhysicalDeviceProtectedMemoryProperties*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PhysicalDeviceProtectedMemoryProperties const& ) const = default;
#else
    bool operator==( PhysicalDeviceProtectedMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( protectedNoFault == rhs.protectedNoFault );
    }
    bool operator!=( PhysicalDeviceProtectedMemoryProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceProtectedMemoryProperties;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
  };
  static_assert( sizeof( PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PhysicalDeviceProtectedMemoryProperties>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePhysicalDeviceProtectedMemoryProperties>
  {
    using Type = PhysicalDeviceProtectedMemoryProperties;
  };
  53709. struct PhysicalDevicePushDescriptorPropertiesKHR
  53710. {
  53711. static const bool allowDuplicate = false;
  53712. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
  53713. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  53714. VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR(uint32_t maxPushDescriptors_ = {}) VULKAN_HPP_NOEXCEPT
  53715. : maxPushDescriptors( maxPushDescriptors_ )
  53716. {}
  53717. VULKAN_HPP_CONSTEXPR PhysicalDevicePushDescriptorPropertiesKHR( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  53718. PhysicalDevicePushDescriptorPropertiesKHR( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  53719. : PhysicalDevicePushDescriptorPropertiesKHR( *reinterpret_cast<PhysicalDevicePushDescriptorPropertiesKHR const *>( &rhs ) )
  53720. {}
  53721. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  53722. VULKAN_HPP_CONSTEXPR_14 PhysicalDevicePushDescriptorPropertiesKHR & operator=( PhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  53723. PhysicalDevicePushDescriptorPropertiesKHR & operator=( VkPhysicalDevicePushDescriptorPropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  53724. {
  53725. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR const *>( &rhs );
  53726. return *this;
  53727. }
  53728. operator VkPhysicalDevicePushDescriptorPropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
  53729. {
  53730. return *reinterpret_cast<const VkPhysicalDevicePushDescriptorPropertiesKHR*>( this );
  53731. }
  53732. operator VkPhysicalDevicePushDescriptorPropertiesKHR &() VULKAN_HPP_NOEXCEPT
  53733. {
  53734. return *reinterpret_cast<VkPhysicalDevicePushDescriptorPropertiesKHR*>( this );
  53735. }
  53736. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  53737. auto operator<=>( PhysicalDevicePushDescriptorPropertiesKHR const& ) const = default;
  53738. #else
  53739. bool operator==( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  53740. {
  53741. return ( sType == rhs.sType )
  53742. && ( pNext == rhs.pNext )
  53743. && ( maxPushDescriptors == rhs.maxPushDescriptors );
  53744. }
  53745. bool operator!=( PhysicalDevicePushDescriptorPropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  53746. {
  53747. return !operator==( rhs );
  53748. }
  53749. #endif
  53750. public:
  53751. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDevicePushDescriptorPropertiesKHR;
  53752. void* pNext = {};
  53753. uint32_t maxPushDescriptors = {};
  53754. };
  53755. static_assert( sizeof( PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ), "struct and wrapper have different size!" );
  53756. static_assert( std::is_standard_layout<PhysicalDevicePushDescriptorPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
  53757. template <>
  53758. struct CppType<StructureType, StructureType::ePhysicalDevicePushDescriptorPropertiesKHR>
  53759. {
  53760. using Type = PhysicalDevicePushDescriptorPropertiesKHR;
  53761. };
  53762. struct PhysicalDeviceRayQueryFeaturesKHR
  53763. {
  53764. static const bool allowDuplicate = false;
  53765. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR;
  53766. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  53767. VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ = {}) VULKAN_HPP_NOEXCEPT
  53768. : rayQuery( rayQuery_ )
  53769. {}
  53770. VULKAN_HPP_CONSTEXPR PhysicalDeviceRayQueryFeaturesKHR( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  53771. PhysicalDeviceRayQueryFeaturesKHR( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  53772. : PhysicalDeviceRayQueryFeaturesKHR( *reinterpret_cast<PhysicalDeviceRayQueryFeaturesKHR const *>( &rhs ) )
  53773. {}
  53774. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  53775. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayQueryFeaturesKHR & operator=( PhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  53776. PhysicalDeviceRayQueryFeaturesKHR & operator=( VkPhysicalDeviceRayQueryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  53777. {
  53778. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR const *>( &rhs );
  53779. return *this;
  53780. }
  53781. PhysicalDeviceRayQueryFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  53782. {
  53783. pNext = pNext_;
  53784. return *this;
  53785. }
  53786. PhysicalDeviceRayQueryFeaturesKHR & setRayQuery( VULKAN_HPP_NAMESPACE::Bool32 rayQuery_ ) VULKAN_HPP_NOEXCEPT
  53787. {
  53788. rayQuery = rayQuery_;
  53789. return *this;
  53790. }
  53791. operator VkPhysicalDeviceRayQueryFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
  53792. {
  53793. return *reinterpret_cast<const VkPhysicalDeviceRayQueryFeaturesKHR*>( this );
  53794. }
  53795. operator VkPhysicalDeviceRayQueryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
  53796. {
  53797. return *reinterpret_cast<VkPhysicalDeviceRayQueryFeaturesKHR*>( this );
  53798. }
  53799. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  53800. auto operator<=>( PhysicalDeviceRayQueryFeaturesKHR const& ) const = default;
  53801. #else
  53802. bool operator==( PhysicalDeviceRayQueryFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  53803. {
  53804. return ( sType == rhs.sType )
  53805. && ( pNext == rhs.pNext )
  53806. && ( rayQuery == rhs.rayQuery );
  53807. }
  53808. bool operator!=( PhysicalDeviceRayQueryFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  53809. {
  53810. return !operator==( rhs );
  53811. }
  53812. #endif
  53813. public:
  53814. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayQueryFeaturesKHR;
  53815. void* pNext = {};
  53816. VULKAN_HPP_NAMESPACE::Bool32 rayQuery = {};
  53817. };
  53818. static_assert( sizeof( PhysicalDeviceRayQueryFeaturesKHR ) == sizeof( VkPhysicalDeviceRayQueryFeaturesKHR ), "struct and wrapper have different size!" );
  53819. static_assert( std::is_standard_layout<PhysicalDeviceRayQueryFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
  53820. template <>
  53821. struct CppType<StructureType, StructureType::ePhysicalDeviceRayQueryFeaturesKHR>
  53822. {
  53823. using Type = PhysicalDeviceRayQueryFeaturesKHR;
  53824. };
  53825. struct PhysicalDeviceRayTracingPipelineFeaturesKHR
  53826. {
  53827. static const bool allowDuplicate = false;
  53828. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR;
  53829. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  53830. VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ = {}) VULKAN_HPP_NOEXCEPT
  53831. : rayTracingPipeline( rayTracingPipeline_ ), rayTracingPipelineShaderGroupHandleCaptureReplay( rayTracingPipelineShaderGroupHandleCaptureReplay_ ), rayTracingPipelineShaderGroupHandleCaptureReplayMixed( rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ), rayTracingPipelineTraceRaysIndirect( rayTracingPipelineTraceRaysIndirect_ ), rayTraversalPrimitiveCulling( rayTraversalPrimitiveCulling_ )
  53832. {}
  53833. VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelineFeaturesKHR( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  53834. PhysicalDeviceRayTracingPipelineFeaturesKHR( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  53835. : PhysicalDeviceRayTracingPipelineFeaturesKHR( *reinterpret_cast<PhysicalDeviceRayTracingPipelineFeaturesKHR const *>( &rhs ) )
  53836. {}
  53837. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  53838. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( PhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  53839. PhysicalDeviceRayTracingPipelineFeaturesKHR & operator=( VkPhysicalDeviceRayTracingPipelineFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  53840. {
  53841. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR const *>( &rhs );
  53842. return *this;
  53843. }
  53844. PhysicalDeviceRayTracingPipelineFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  53845. {
  53846. pNext = pNext_;
  53847. return *this;
  53848. }
  53849. PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipeline( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline_ ) VULKAN_HPP_NOEXCEPT
  53850. {
  53851. rayTracingPipeline = rayTracingPipeline_;
  53852. return *this;
  53853. }
  53854. PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineShaderGroupHandleCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
  53855. {
  53856. rayTracingPipelineShaderGroupHandleCaptureReplay = rayTracingPipelineShaderGroupHandleCaptureReplay_;
  53857. return *this;
  53858. }
  53859. PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineShaderGroupHandleCaptureReplayMixed( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed_ ) VULKAN_HPP_NOEXCEPT
  53860. {
  53861. rayTracingPipelineShaderGroupHandleCaptureReplayMixed = rayTracingPipelineShaderGroupHandleCaptureReplayMixed_;
  53862. return *this;
  53863. }
  53864. PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTracingPipelineTraceRaysIndirect( VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect_ ) VULKAN_HPP_NOEXCEPT
  53865. {
  53866. rayTracingPipelineTraceRaysIndirect = rayTracingPipelineTraceRaysIndirect_;
  53867. return *this;
  53868. }
  53869. PhysicalDeviceRayTracingPipelineFeaturesKHR & setRayTraversalPrimitiveCulling( VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling_ ) VULKAN_HPP_NOEXCEPT
  53870. {
  53871. rayTraversalPrimitiveCulling = rayTraversalPrimitiveCulling_;
  53872. return *this;
  53873. }
  53874. operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
  53875. {
  53876. return *reinterpret_cast<const VkPhysicalDeviceRayTracingPipelineFeaturesKHR*>( this );
  53877. }
  53878. operator VkPhysicalDeviceRayTracingPipelineFeaturesKHR &() VULKAN_HPP_NOEXCEPT
  53879. {
  53880. return *reinterpret_cast<VkPhysicalDeviceRayTracingPipelineFeaturesKHR*>( this );
  53881. }
  53882. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  53883. auto operator<=>( PhysicalDeviceRayTracingPipelineFeaturesKHR const& ) const = default;
  53884. #else
  53885. bool operator==( PhysicalDeviceRayTracingPipelineFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  53886. {
  53887. return ( sType == rhs.sType )
  53888. && ( pNext == rhs.pNext )
  53889. && ( rayTracingPipeline == rhs.rayTracingPipeline )
  53890. && ( rayTracingPipelineShaderGroupHandleCaptureReplay == rhs.rayTracingPipelineShaderGroupHandleCaptureReplay )
  53891. && ( rayTracingPipelineShaderGroupHandleCaptureReplayMixed == rhs.rayTracingPipelineShaderGroupHandleCaptureReplayMixed )
  53892. && ( rayTracingPipelineTraceRaysIndirect == rhs.rayTracingPipelineTraceRaysIndirect )
  53893. && ( rayTraversalPrimitiveCulling == rhs.rayTraversalPrimitiveCulling );
  53894. }
  53895. bool operator!=( PhysicalDeviceRayTracingPipelineFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  53896. {
  53897. return !operator==( rhs );
  53898. }
  53899. #endif
  53900. public:
  53901. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR;
  53902. void* pNext = {};
  53903. VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipeline = {};
  53904. VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplay = {};
  53905. VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed = {};
  53906. VULKAN_HPP_NAMESPACE::Bool32 rayTracingPipelineTraceRaysIndirect = {};
  53907. VULKAN_HPP_NAMESPACE::Bool32 rayTraversalPrimitiveCulling = {};
  53908. };
  53909. static_assert( sizeof( PhysicalDeviceRayTracingPipelineFeaturesKHR ) == sizeof( VkPhysicalDeviceRayTracingPipelineFeaturesKHR ), "struct and wrapper have different size!" );
  53910. static_assert( std::is_standard_layout<PhysicalDeviceRayTracingPipelineFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
  53911. template <>
  53912. struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPipelineFeaturesKHR>
  53913. {
  53914. using Type = PhysicalDeviceRayTracingPipelineFeaturesKHR;
  53915. };
  53916. struct PhysicalDeviceRayTracingPipelinePropertiesKHR
  53917. {
  53918. static const bool allowDuplicate = false;
  53919. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR;
  53920. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  53921. VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR(uint32_t shaderGroupHandleSize_ = {}, uint32_t maxRayRecursionDepth_ = {}, uint32_t maxShaderGroupStride_ = {}, uint32_t shaderGroupBaseAlignment_ = {}, uint32_t shaderGroupHandleCaptureReplaySize_ = {}, uint32_t maxRayDispatchInvocationCount_ = {}, uint32_t shaderGroupHandleAlignment_ = {}, uint32_t maxRayHitAttributeSize_ = {}) VULKAN_HPP_NOEXCEPT
  53922. : shaderGroupHandleSize( shaderGroupHandleSize_ ), maxRayRecursionDepth( maxRayRecursionDepth_ ), maxShaderGroupStride( maxShaderGroupStride_ ), shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ), shaderGroupHandleCaptureReplaySize( shaderGroupHandleCaptureReplaySize_ ), maxRayDispatchInvocationCount( maxRayDispatchInvocationCount_ ), shaderGroupHandleAlignment( shaderGroupHandleAlignment_ ), maxRayHitAttributeSize( maxRayHitAttributeSize_ )
  53923. {}
  53924. VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPipelinePropertiesKHR( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  53925. PhysicalDeviceRayTracingPipelinePropertiesKHR( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  53926. : PhysicalDeviceRayTracingPipelinePropertiesKHR( *reinterpret_cast<PhysicalDeviceRayTracingPipelinePropertiesKHR const *>( &rhs ) )
  53927. {}
  53928. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  53929. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( PhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  53930. PhysicalDeviceRayTracingPipelinePropertiesKHR & operator=( VkPhysicalDeviceRayTracingPipelinePropertiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  53931. {
  53932. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR const *>( &rhs );
  53933. return *this;
  53934. }
  53935. operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR const&() const VULKAN_HPP_NOEXCEPT
  53936. {
  53937. return *reinterpret_cast<const VkPhysicalDeviceRayTracingPipelinePropertiesKHR*>( this );
  53938. }
  53939. operator VkPhysicalDeviceRayTracingPipelinePropertiesKHR &() VULKAN_HPP_NOEXCEPT
  53940. {
  53941. return *reinterpret_cast<VkPhysicalDeviceRayTracingPipelinePropertiesKHR*>( this );
  53942. }
  53943. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  53944. auto operator<=>( PhysicalDeviceRayTracingPipelinePropertiesKHR const& ) const = default;
  53945. #else
  53946. bool operator==( PhysicalDeviceRayTracingPipelinePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  53947. {
  53948. return ( sType == rhs.sType )
  53949. && ( pNext == rhs.pNext )
  53950. && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize )
  53951. && ( maxRayRecursionDepth == rhs.maxRayRecursionDepth )
  53952. && ( maxShaderGroupStride == rhs.maxShaderGroupStride )
  53953. && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment )
  53954. && ( shaderGroupHandleCaptureReplaySize == rhs.shaderGroupHandleCaptureReplaySize )
  53955. && ( maxRayDispatchInvocationCount == rhs.maxRayDispatchInvocationCount )
  53956. && ( shaderGroupHandleAlignment == rhs.shaderGroupHandleAlignment )
  53957. && ( maxRayHitAttributeSize == rhs.maxRayHitAttributeSize );
  53958. }
  53959. bool operator!=( PhysicalDeviceRayTracingPipelinePropertiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  53960. {
  53961. return !operator==( rhs );
  53962. }
  53963. #endif
  53964. public:
  53965. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR;
  53966. void* pNext = {};
  53967. uint32_t shaderGroupHandleSize = {};
  53968. uint32_t maxRayRecursionDepth = {};
  53969. uint32_t maxShaderGroupStride = {};
  53970. uint32_t shaderGroupBaseAlignment = {};
  53971. uint32_t shaderGroupHandleCaptureReplaySize = {};
  53972. uint32_t maxRayDispatchInvocationCount = {};
  53973. uint32_t shaderGroupHandleAlignment = {};
  53974. uint32_t maxRayHitAttributeSize = {};
  53975. };
  53976. static_assert( sizeof( PhysicalDeviceRayTracingPipelinePropertiesKHR ) == sizeof( VkPhysicalDeviceRayTracingPipelinePropertiesKHR ), "struct and wrapper have different size!" );
  53977. static_assert( std::is_standard_layout<PhysicalDeviceRayTracingPipelinePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
  53978. template <>
  53979. struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPipelinePropertiesKHR>
  53980. {
  53981. using Type = PhysicalDeviceRayTracingPipelinePropertiesKHR;
  53982. };
  53983. struct PhysicalDeviceRayTracingPropertiesNV
  53984. {
  53985. static const bool allowDuplicate = false;
  53986. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
  53987. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  53988. VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV(uint32_t shaderGroupHandleSize_ = {}, uint32_t maxRecursionDepth_ = {}, uint32_t maxShaderGroupStride_ = {}, uint32_t shaderGroupBaseAlignment_ = {}, uint64_t maxGeometryCount_ = {}, uint64_t maxInstanceCount_ = {}, uint64_t maxTriangleCount_ = {}, uint32_t maxDescriptorSetAccelerationStructures_ = {}) VULKAN_HPP_NOEXCEPT
  53989. : shaderGroupHandleSize( shaderGroupHandleSize_ ), maxRecursionDepth( maxRecursionDepth_ ), maxShaderGroupStride( maxShaderGroupStride_ ), shaderGroupBaseAlignment( shaderGroupBaseAlignment_ ), maxGeometryCount( maxGeometryCount_ ), maxInstanceCount( maxInstanceCount_ ), maxTriangleCount( maxTriangleCount_ ), maxDescriptorSetAccelerationStructures( maxDescriptorSetAccelerationStructures_ )
  53990. {}
  53991. VULKAN_HPP_CONSTEXPR PhysicalDeviceRayTracingPropertiesNV( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  53992. PhysicalDeviceRayTracingPropertiesNV( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  53993. : PhysicalDeviceRayTracingPropertiesNV( *reinterpret_cast<PhysicalDeviceRayTracingPropertiesNV const *>( &rhs ) )
  53994. {}
  53995. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  53996. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRayTracingPropertiesNV & operator=( PhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  53997. PhysicalDeviceRayTracingPropertiesNV & operator=( VkPhysicalDeviceRayTracingPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  53998. {
  53999. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV const *>( &rhs );
  54000. return *this;
  54001. }
  54002. operator VkPhysicalDeviceRayTracingPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
  54003. {
  54004. return *reinterpret_cast<const VkPhysicalDeviceRayTracingPropertiesNV*>( this );
  54005. }
  54006. operator VkPhysicalDeviceRayTracingPropertiesNV &() VULKAN_HPP_NOEXCEPT
  54007. {
  54008. return *reinterpret_cast<VkPhysicalDeviceRayTracingPropertiesNV*>( this );
  54009. }
  54010. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  54011. auto operator<=>( PhysicalDeviceRayTracingPropertiesNV const& ) const = default;
  54012. #else
  54013. bool operator==( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  54014. {
  54015. return ( sType == rhs.sType )
  54016. && ( pNext == rhs.pNext )
  54017. && ( shaderGroupHandleSize == rhs.shaderGroupHandleSize )
  54018. && ( maxRecursionDepth == rhs.maxRecursionDepth )
  54019. && ( maxShaderGroupStride == rhs.maxShaderGroupStride )
  54020. && ( shaderGroupBaseAlignment == rhs.shaderGroupBaseAlignment )
  54021. && ( maxGeometryCount == rhs.maxGeometryCount )
  54022. && ( maxInstanceCount == rhs.maxInstanceCount )
  54023. && ( maxTriangleCount == rhs.maxTriangleCount )
  54024. && ( maxDescriptorSetAccelerationStructures == rhs.maxDescriptorSetAccelerationStructures );
  54025. }
  54026. bool operator!=( PhysicalDeviceRayTracingPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  54027. {
  54028. return !operator==( rhs );
  54029. }
  54030. #endif
  54031. public:
  54032. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRayTracingPropertiesNV;
  54033. void* pNext = {};
  54034. uint32_t shaderGroupHandleSize = {};
  54035. uint32_t maxRecursionDepth = {};
  54036. uint32_t maxShaderGroupStride = {};
  54037. uint32_t shaderGroupBaseAlignment = {};
  54038. uint64_t maxGeometryCount = {};
  54039. uint64_t maxInstanceCount = {};
  54040. uint64_t maxTriangleCount = {};
  54041. uint32_t maxDescriptorSetAccelerationStructures = {};
  54042. };
  54043. static_assert( sizeof( PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ), "struct and wrapper have different size!" );
  54044. static_assert( std::is_standard_layout<PhysicalDeviceRayTracingPropertiesNV>::value, "struct wrapper is not a standard layout!" );
  54045. template <>
  54046. struct CppType<StructureType, StructureType::ePhysicalDeviceRayTracingPropertiesNV>
  54047. {
  54048. using Type = PhysicalDeviceRayTracingPropertiesNV;
  54049. };
  54050. struct PhysicalDeviceRepresentativeFragmentTestFeaturesNV
  54051. {
  54052. static const bool allowDuplicate = false;
  54053. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
  54054. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54055. VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ = {}) VULKAN_HPP_NOEXCEPT
  54056. : representativeFragmentTest( representativeFragmentTest_ )
  54057. {}
  54058. VULKAN_HPP_CONSTEXPR PhysicalDeviceRepresentativeFragmentTestFeaturesNV( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54059. PhysicalDeviceRepresentativeFragmentTestFeaturesNV( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  54060. : PhysicalDeviceRepresentativeFragmentTestFeaturesNV( *reinterpret_cast<PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs ) )
  54061. {}
  54062. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54063. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54064. PhysicalDeviceRepresentativeFragmentTestFeaturesNV & operator=( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  54065. {
  54066. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV const *>( &rhs );
  54067. return *this;
  54068. }
  54069. PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  54070. {
  54071. pNext = pNext_;
  54072. return *this;
  54073. }
  54074. PhysicalDeviceRepresentativeFragmentTestFeaturesNV & setRepresentativeFragmentTest( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest_ ) VULKAN_HPP_NOEXCEPT
  54075. {
  54076. representativeFragmentTest = representativeFragmentTest_;
  54077. return *this;
  54078. }
  54079. operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
  54080. {
  54081. return *reinterpret_cast<const VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
  54082. }
  54083. operator VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV &() VULKAN_HPP_NOEXCEPT
  54084. {
  54085. return *reinterpret_cast<VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV*>( this );
  54086. }
  54087. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  54088. auto operator<=>( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& ) const = default;
  54089. #else
  54090. bool operator==( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  54091. {
  54092. return ( sType == rhs.sType )
  54093. && ( pNext == rhs.pNext )
  54094. && ( representativeFragmentTest == rhs.representativeFragmentTest );
  54095. }
  54096. bool operator!=( PhysicalDeviceRepresentativeFragmentTestFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  54097. {
  54098. return !operator==( rhs );
  54099. }
  54100. #endif
  54101. public:
  54102. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV;
  54103. void* pNext = {};
  54104. VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTest = {};
  54105. };
  54106. static_assert( sizeof( PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) == sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ), "struct and wrapper have different size!" );
  54107. static_assert( std::is_standard_layout<PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  54108. template <>
  54109. struct CppType<StructureType, StructureType::ePhysicalDeviceRepresentativeFragmentTestFeaturesNV>
  54110. {
  54111. using Type = PhysicalDeviceRepresentativeFragmentTestFeaturesNV;
  54112. };
  54113. struct PhysicalDeviceRobustness2FeaturesEXT
  54114. {
  54115. static const bool allowDuplicate = false;
  54116. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT;
  54117. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54118. VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ = {}, VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ = {}) VULKAN_HPP_NOEXCEPT
  54119. : robustBufferAccess2( robustBufferAccess2_ ), robustImageAccess2( robustImageAccess2_ ), nullDescriptor( nullDescriptor_ )
  54120. {}
  54121. VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2FeaturesEXT( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54122. PhysicalDeviceRobustness2FeaturesEXT( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  54123. : PhysicalDeviceRobustness2FeaturesEXT( *reinterpret_cast<PhysicalDeviceRobustness2FeaturesEXT const *>( &rhs ) )
  54124. {}
  54125. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54126. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2FeaturesEXT & operator=( PhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54127. PhysicalDeviceRobustness2FeaturesEXT & operator=( VkPhysicalDeviceRobustness2FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  54128. {
  54129. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT const *>( &rhs );
  54130. return *this;
  54131. }
  54132. PhysicalDeviceRobustness2FeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  54133. {
  54134. pNext = pNext_;
  54135. return *this;
  54136. }
  54137. PhysicalDeviceRobustness2FeaturesEXT & setRobustBufferAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2_ ) VULKAN_HPP_NOEXCEPT
  54138. {
  54139. robustBufferAccess2 = robustBufferAccess2_;
  54140. return *this;
  54141. }
  54142. PhysicalDeviceRobustness2FeaturesEXT & setRobustImageAccess2( VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2_ ) VULKAN_HPP_NOEXCEPT
  54143. {
  54144. robustImageAccess2 = robustImageAccess2_;
  54145. return *this;
  54146. }
  54147. PhysicalDeviceRobustness2FeaturesEXT & setNullDescriptor( VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor_ ) VULKAN_HPP_NOEXCEPT
  54148. {
  54149. nullDescriptor = nullDescriptor_;
  54150. return *this;
  54151. }
  54152. operator VkPhysicalDeviceRobustness2FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  54153. {
  54154. return *reinterpret_cast<const VkPhysicalDeviceRobustness2FeaturesEXT*>( this );
  54155. }
  54156. operator VkPhysicalDeviceRobustness2FeaturesEXT &() VULKAN_HPP_NOEXCEPT
  54157. {
  54158. return *reinterpret_cast<VkPhysicalDeviceRobustness2FeaturesEXT*>( this );
  54159. }
  54160. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  54161. auto operator<=>( PhysicalDeviceRobustness2FeaturesEXT const& ) const = default;
  54162. #else
  54163. bool operator==( PhysicalDeviceRobustness2FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  54164. {
  54165. return ( sType == rhs.sType )
  54166. && ( pNext == rhs.pNext )
  54167. && ( robustBufferAccess2 == rhs.robustBufferAccess2 )
  54168. && ( robustImageAccess2 == rhs.robustImageAccess2 )
  54169. && ( nullDescriptor == rhs.nullDescriptor );
  54170. }
  54171. bool operator!=( PhysicalDeviceRobustness2FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  54172. {
  54173. return !operator==( rhs );
  54174. }
  54175. #endif
  54176. public:
  54177. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2FeaturesEXT;
  54178. void* pNext = {};
  54179. VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccess2 = {};
  54180. VULKAN_HPP_NAMESPACE::Bool32 robustImageAccess2 = {};
  54181. VULKAN_HPP_NAMESPACE::Bool32 nullDescriptor = {};
  54182. };
  54183. static_assert( sizeof( PhysicalDeviceRobustness2FeaturesEXT ) == sizeof( VkPhysicalDeviceRobustness2FeaturesEXT ), "struct and wrapper have different size!" );
  54184. static_assert( std::is_standard_layout<PhysicalDeviceRobustness2FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  54185. template <>
  54186. struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2FeaturesEXT>
  54187. {
  54188. using Type = PhysicalDeviceRobustness2FeaturesEXT;
  54189. };
  54190. struct PhysicalDeviceRobustness2PropertiesEXT
  54191. {
  54192. static const bool allowDuplicate = false;
  54193. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
  54194. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54195. VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment_ = {}) VULKAN_HPP_NOEXCEPT
  54196. : robustStorageBufferAccessSizeAlignment( robustStorageBufferAccessSizeAlignment_ ), robustUniformBufferAccessSizeAlignment( robustUniformBufferAccessSizeAlignment_ )
  54197. {}
  54198. VULKAN_HPP_CONSTEXPR PhysicalDeviceRobustness2PropertiesEXT( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54199. PhysicalDeviceRobustness2PropertiesEXT( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  54200. : PhysicalDeviceRobustness2PropertiesEXT( *reinterpret_cast<PhysicalDeviceRobustness2PropertiesEXT const *>( &rhs ) )
  54201. {}
  54202. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54203. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceRobustness2PropertiesEXT & operator=( PhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54204. PhysicalDeviceRobustness2PropertiesEXT & operator=( VkPhysicalDeviceRobustness2PropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  54205. {
  54206. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT const *>( &rhs );
  54207. return *this;
  54208. }
  54209. operator VkPhysicalDeviceRobustness2PropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
  54210. {
  54211. return *reinterpret_cast<const VkPhysicalDeviceRobustness2PropertiesEXT*>( this );
  54212. }
  54213. operator VkPhysicalDeviceRobustness2PropertiesEXT &() VULKAN_HPP_NOEXCEPT
  54214. {
  54215. return *reinterpret_cast<VkPhysicalDeviceRobustness2PropertiesEXT*>( this );
  54216. }
  54217. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  54218. auto operator<=>( PhysicalDeviceRobustness2PropertiesEXT const& ) const = default;
  54219. #else
  54220. bool operator==( PhysicalDeviceRobustness2PropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  54221. {
  54222. return ( sType == rhs.sType )
  54223. && ( pNext == rhs.pNext )
  54224. && ( robustStorageBufferAccessSizeAlignment == rhs.robustStorageBufferAccessSizeAlignment )
  54225. && ( robustUniformBufferAccessSizeAlignment == rhs.robustUniformBufferAccessSizeAlignment );
  54226. }
  54227. bool operator!=( PhysicalDeviceRobustness2PropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  54228. {
  54229. return !operator==( rhs );
  54230. }
  54231. #endif
  54232. public:
  54233. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceRobustness2PropertiesEXT;
  54234. void* pNext = {};
  54235. VULKAN_HPP_NAMESPACE::DeviceSize robustStorageBufferAccessSizeAlignment = {};
  54236. VULKAN_HPP_NAMESPACE::DeviceSize robustUniformBufferAccessSizeAlignment = {};
  54237. };
  54238. static_assert( sizeof( PhysicalDeviceRobustness2PropertiesEXT ) == sizeof( VkPhysicalDeviceRobustness2PropertiesEXT ), "struct and wrapper have different size!" );
  54239. static_assert( std::is_standard_layout<PhysicalDeviceRobustness2PropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  54240. template <>
  54241. struct CppType<StructureType, StructureType::ePhysicalDeviceRobustness2PropertiesEXT>
  54242. {
  54243. using Type = PhysicalDeviceRobustness2PropertiesEXT;
  54244. };
  54245. struct PhysicalDeviceSampleLocationsPropertiesEXT
  54246. {
  54247. static const bool allowDuplicate = false;
  54248. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
  54249. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54250. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT(VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts_ = {}, VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize_ = {}, std::array<float,2> const& sampleLocationCoordinateRange_ = {}, uint32_t sampleLocationSubPixelBits_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT
  54251. : sampleLocationSampleCounts( sampleLocationSampleCounts_ ), maxSampleLocationGridSize( maxSampleLocationGridSize_ ), sampleLocationCoordinateRange( sampleLocationCoordinateRange_ ), sampleLocationSubPixelBits( sampleLocationSubPixelBits_ ), variableSampleLocations( variableSampleLocations_ )
  54252. {}
  54253. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54254. PhysicalDeviceSampleLocationsPropertiesEXT( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  54255. : PhysicalDeviceSampleLocationsPropertiesEXT( *reinterpret_cast<PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs ) )
  54256. {}
  54257. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54258. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSampleLocationsPropertiesEXT & operator=( PhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54259. PhysicalDeviceSampleLocationsPropertiesEXT & operator=( VkPhysicalDeviceSampleLocationsPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  54260. {
  54261. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT const *>( &rhs );
  54262. return *this;
  54263. }
  54264. operator VkPhysicalDeviceSampleLocationsPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
  54265. {
  54266. return *reinterpret_cast<const VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
  54267. }
  54268. operator VkPhysicalDeviceSampleLocationsPropertiesEXT &() VULKAN_HPP_NOEXCEPT
  54269. {
  54270. return *reinterpret_cast<VkPhysicalDeviceSampleLocationsPropertiesEXT*>( this );
  54271. }
  54272. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  54273. auto operator<=>( PhysicalDeviceSampleLocationsPropertiesEXT const& ) const = default;
  54274. #else
  54275. bool operator==( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  54276. {
  54277. return ( sType == rhs.sType )
  54278. && ( pNext == rhs.pNext )
  54279. && ( sampleLocationSampleCounts == rhs.sampleLocationSampleCounts )
  54280. && ( maxSampleLocationGridSize == rhs.maxSampleLocationGridSize )
  54281. && ( sampleLocationCoordinateRange == rhs.sampleLocationCoordinateRange )
  54282. && ( sampleLocationSubPixelBits == rhs.sampleLocationSubPixelBits )
  54283. && ( variableSampleLocations == rhs.variableSampleLocations );
  54284. }
  54285. bool operator!=( PhysicalDeviceSampleLocationsPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  54286. {
  54287. return !operator==( rhs );
  54288. }
  54289. #endif
  54290. public:
  54291. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT;
  54292. void* pNext = {};
  54293. VULKAN_HPP_NAMESPACE::SampleCountFlags sampleLocationSampleCounts = {};
  54294. VULKAN_HPP_NAMESPACE::Extent2D maxSampleLocationGridSize = {};
  54295. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<float, 2> sampleLocationCoordinateRange = {};
  54296. uint32_t sampleLocationSubPixelBits = {};
  54297. VULKAN_HPP_NAMESPACE::Bool32 variableSampleLocations = {};
  54298. };
  54299. static_assert( sizeof( PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ), "struct and wrapper have different size!" );
  54300. static_assert( std::is_standard_layout<PhysicalDeviceSampleLocationsPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
  54301. template <>
  54302. struct CppType<StructureType, StructureType::ePhysicalDeviceSampleLocationsPropertiesEXT>
  54303. {
  54304. using Type = PhysicalDeviceSampleLocationsPropertiesEXT;
  54305. };
  54306. struct PhysicalDeviceSamplerFilterMinmaxProperties
  54307. {
  54308. static const bool allowDuplicate = false;
  54309. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
  54310. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54311. VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties(VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}) VULKAN_HPP_NOEXCEPT
  54312. : filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ), filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ )
  54313. {}
  54314. VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerFilterMinmaxProperties( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54315. PhysicalDeviceSamplerFilterMinmaxProperties( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  54316. : PhysicalDeviceSamplerFilterMinmaxProperties( *reinterpret_cast<PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs ) )
  54317. {}
  54318. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54319. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerFilterMinmaxProperties & operator=( PhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54320. PhysicalDeviceSamplerFilterMinmaxProperties & operator=( VkPhysicalDeviceSamplerFilterMinmaxProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  54321. {
  54322. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties const *>( &rhs );
  54323. return *this;
  54324. }
  54325. operator VkPhysicalDeviceSamplerFilterMinmaxProperties const&() const VULKAN_HPP_NOEXCEPT
  54326. {
  54327. return *reinterpret_cast<const VkPhysicalDeviceSamplerFilterMinmaxProperties*>( this );
  54328. }
  54329. operator VkPhysicalDeviceSamplerFilterMinmaxProperties &() VULKAN_HPP_NOEXCEPT
  54330. {
  54331. return *reinterpret_cast<VkPhysicalDeviceSamplerFilterMinmaxProperties*>( this );
  54332. }
  54333. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  54334. auto operator<=>( PhysicalDeviceSamplerFilterMinmaxProperties const& ) const = default;
  54335. #else
  54336. bool operator==( PhysicalDeviceSamplerFilterMinmaxProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  54337. {
  54338. return ( sType == rhs.sType )
  54339. && ( pNext == rhs.pNext )
  54340. && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats )
  54341. && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping );
  54342. }
  54343. bool operator!=( PhysicalDeviceSamplerFilterMinmaxProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  54344. {
  54345. return !operator==( rhs );
  54346. }
  54347. #endif
  54348. public:
  54349. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties;
  54350. void* pNext = {};
  54351. VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
  54352. VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
  54353. };
  54354. static_assert( sizeof( PhysicalDeviceSamplerFilterMinmaxProperties ) == sizeof( VkPhysicalDeviceSamplerFilterMinmaxProperties ), "struct and wrapper have different size!" );
  54355. static_assert( std::is_standard_layout<PhysicalDeviceSamplerFilterMinmaxProperties>::value, "struct wrapper is not a standard layout!" );
  54356. template <>
  54357. struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerFilterMinmaxProperties>
  54358. {
  54359. using Type = PhysicalDeviceSamplerFilterMinmaxProperties;
  54360. };
  54361. using PhysicalDeviceSamplerFilterMinmaxPropertiesEXT = PhysicalDeviceSamplerFilterMinmaxProperties;
  54362. struct PhysicalDeviceSamplerYcbcrConversionFeatures
  54363. {
  54364. static const bool allowDuplicate = false;
  54365. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
  54366. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54367. VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures(VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}) VULKAN_HPP_NOEXCEPT
  54368. : samplerYcbcrConversion( samplerYcbcrConversion_ )
  54369. {}
  54370. VULKAN_HPP_CONSTEXPR PhysicalDeviceSamplerYcbcrConversionFeatures( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54371. PhysicalDeviceSamplerYcbcrConversionFeatures( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  54372. : PhysicalDeviceSamplerYcbcrConversionFeatures( *reinterpret_cast<PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs ) )
  54373. {}
  54374. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54375. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( PhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54376. PhysicalDeviceSamplerYcbcrConversionFeatures & operator=( VkPhysicalDeviceSamplerYcbcrConversionFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  54377. {
  54378. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures const *>( &rhs );
  54379. return *this;
  54380. }
  54381. PhysicalDeviceSamplerYcbcrConversionFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  54382. {
  54383. pNext = pNext_;
  54384. return *this;
  54385. }
  54386. PhysicalDeviceSamplerYcbcrConversionFeatures & setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
  54387. {
  54388. samplerYcbcrConversion = samplerYcbcrConversion_;
  54389. return *this;
  54390. }
  54391. operator VkPhysicalDeviceSamplerYcbcrConversionFeatures const&() const VULKAN_HPP_NOEXCEPT
  54392. {
  54393. return *reinterpret_cast<const VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
  54394. }
  54395. operator VkPhysicalDeviceSamplerYcbcrConversionFeatures &() VULKAN_HPP_NOEXCEPT
  54396. {
  54397. return *reinterpret_cast<VkPhysicalDeviceSamplerYcbcrConversionFeatures*>( this );
  54398. }
  54399. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  54400. auto operator<=>( PhysicalDeviceSamplerYcbcrConversionFeatures const& ) const = default;
  54401. #else
  54402. bool operator==( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  54403. {
  54404. return ( sType == rhs.sType )
  54405. && ( pNext == rhs.pNext )
  54406. && ( samplerYcbcrConversion == rhs.samplerYcbcrConversion );
  54407. }
  54408. bool operator!=( PhysicalDeviceSamplerYcbcrConversionFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  54409. {
  54410. return !operator==( rhs );
  54411. }
  54412. #endif
  54413. public:
  54414. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures;
  54415. void* pNext = {};
  54416. VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
  54417. };
  54418. static_assert( sizeof( PhysicalDeviceSamplerYcbcrConversionFeatures ) == sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ), "struct and wrapper have different size!" );
  54419. static_assert( std::is_standard_layout<PhysicalDeviceSamplerYcbcrConversionFeatures>::value, "struct wrapper is not a standard layout!" );
  54420. template <>
  54421. struct CppType<StructureType, StructureType::ePhysicalDeviceSamplerYcbcrConversionFeatures>
  54422. {
  54423. using Type = PhysicalDeviceSamplerYcbcrConversionFeatures;
  54424. };
  54425. using PhysicalDeviceSamplerYcbcrConversionFeaturesKHR = PhysicalDeviceSamplerYcbcrConversionFeatures;
  54426. struct PhysicalDeviceScalarBlockLayoutFeatures
  54427. {
  54428. static const bool allowDuplicate = false;
  54429. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
  54430. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54431. VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures(VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}) VULKAN_HPP_NOEXCEPT
  54432. : scalarBlockLayout( scalarBlockLayout_ )
  54433. {}
  54434. VULKAN_HPP_CONSTEXPR PhysicalDeviceScalarBlockLayoutFeatures( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54435. PhysicalDeviceScalarBlockLayoutFeatures( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  54436. : PhysicalDeviceScalarBlockLayoutFeatures( *reinterpret_cast<PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs ) )
  54437. {}
  54438. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54439. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceScalarBlockLayoutFeatures & operator=( PhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54440. PhysicalDeviceScalarBlockLayoutFeatures & operator=( VkPhysicalDeviceScalarBlockLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  54441. {
  54442. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures const *>( &rhs );
  54443. return *this;
  54444. }
  54445. PhysicalDeviceScalarBlockLayoutFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  54446. {
  54447. pNext = pNext_;
  54448. return *this;
  54449. }
  54450. PhysicalDeviceScalarBlockLayoutFeatures & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
  54451. {
  54452. scalarBlockLayout = scalarBlockLayout_;
  54453. return *this;
  54454. }
  54455. operator VkPhysicalDeviceScalarBlockLayoutFeatures const&() const VULKAN_HPP_NOEXCEPT
  54456. {
  54457. return *reinterpret_cast<const VkPhysicalDeviceScalarBlockLayoutFeatures*>( this );
  54458. }
  54459. operator VkPhysicalDeviceScalarBlockLayoutFeatures &() VULKAN_HPP_NOEXCEPT
  54460. {
  54461. return *reinterpret_cast<VkPhysicalDeviceScalarBlockLayoutFeatures*>( this );
  54462. }
  54463. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  54464. auto operator<=>( PhysicalDeviceScalarBlockLayoutFeatures const& ) const = default;
  54465. #else
  54466. bool operator==( PhysicalDeviceScalarBlockLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  54467. {
  54468. return ( sType == rhs.sType )
  54469. && ( pNext == rhs.pNext )
  54470. && ( scalarBlockLayout == rhs.scalarBlockLayout );
  54471. }
  54472. bool operator!=( PhysicalDeviceScalarBlockLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  54473. {
  54474. return !operator==( rhs );
  54475. }
  54476. #endif
  54477. public:
  54478. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceScalarBlockLayoutFeatures;
  54479. void* pNext = {};
  54480. VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
  54481. };
  54482. static_assert( sizeof( PhysicalDeviceScalarBlockLayoutFeatures ) == sizeof( VkPhysicalDeviceScalarBlockLayoutFeatures ), "struct and wrapper have different size!" );
  54483. static_assert( std::is_standard_layout<PhysicalDeviceScalarBlockLayoutFeatures>::value, "struct wrapper is not a standard layout!" );
  54484. template <>
  54485. struct CppType<StructureType, StructureType::ePhysicalDeviceScalarBlockLayoutFeatures>
  54486. {
  54487. using Type = PhysicalDeviceScalarBlockLayoutFeatures;
  54488. };
  54489. using PhysicalDeviceScalarBlockLayoutFeaturesEXT = PhysicalDeviceScalarBlockLayoutFeatures;
  54490. struct PhysicalDeviceSeparateDepthStencilLayoutsFeatures
  54491. {
  54492. static const bool allowDuplicate = false;
  54493. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
  54494. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54495. VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures(VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}) VULKAN_HPP_NOEXCEPT
  54496. : separateDepthStencilLayouts( separateDepthStencilLayouts_ )
  54497. {}
  54498. VULKAN_HPP_CONSTEXPR PhysicalDeviceSeparateDepthStencilLayoutsFeatures( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54499. PhysicalDeviceSeparateDepthStencilLayoutsFeatures( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  54500. : PhysicalDeviceSeparateDepthStencilLayoutsFeatures( *reinterpret_cast<PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs ) )
  54501. {}
  54502. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54503. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54504. PhysicalDeviceSeparateDepthStencilLayoutsFeatures & operator=( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  54505. {
  54506. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures const *>( &rhs );
  54507. return *this;
  54508. }
  54509. PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  54510. {
  54511. pNext = pNext_;
  54512. return *this;
  54513. }
  54514. PhysicalDeviceSeparateDepthStencilLayoutsFeatures & setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
  54515. {
  54516. separateDepthStencilLayouts = separateDepthStencilLayouts_;
  54517. return *this;
  54518. }
  54519. operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures const&() const VULKAN_HPP_NOEXCEPT
  54520. {
  54521. return *reinterpret_cast<const VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures*>( this );
  54522. }
  54523. operator VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures &() VULKAN_HPP_NOEXCEPT
  54524. {
  54525. return *reinterpret_cast<VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures*>( this );
  54526. }
  54527. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  54528. auto operator<=>( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const& ) const = default;
  54529. #else
  54530. bool operator==( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  54531. {
  54532. return ( sType == rhs.sType )
  54533. && ( pNext == rhs.pNext )
  54534. && ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts );
  54535. }
  54536. bool operator!=( PhysicalDeviceSeparateDepthStencilLayoutsFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  54537. {
  54538. return !operator==( rhs );
  54539. }
  54540. #endif
  54541. public:
  54542. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures;
  54543. void* pNext = {};
  54544. VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};
  54545. };
  54546. static_assert( sizeof( PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) == sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures ), "struct and wrapper have different size!" );
  54547. static_assert( std::is_standard_layout<PhysicalDeviceSeparateDepthStencilLayoutsFeatures>::value, "struct wrapper is not a standard layout!" );
  54548. template <>
  54549. struct CppType<StructureType, StructureType::ePhysicalDeviceSeparateDepthStencilLayoutsFeatures>
  54550. {
  54551. using Type = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
  54552. };
  54553. using PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
  54554. struct PhysicalDeviceShaderAtomicFloatFeaturesEXT
  54555. {
  54556. static const bool allowDuplicate = false;
  54557. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
  54558. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54559. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ = {}) VULKAN_HPP_NOEXCEPT
  54560. : shaderBufferFloat32Atomics( shaderBufferFloat32Atomics_ ), shaderBufferFloat32AtomicAdd( shaderBufferFloat32AtomicAdd_ ), shaderBufferFloat64Atomics( shaderBufferFloat64Atomics_ ), shaderBufferFloat64AtomicAdd( shaderBufferFloat64AtomicAdd_ ), shaderSharedFloat32Atomics( shaderSharedFloat32Atomics_ ), shaderSharedFloat32AtomicAdd( shaderSharedFloat32AtomicAdd_ ), shaderSharedFloat64Atomics( shaderSharedFloat64Atomics_ ), shaderSharedFloat64AtomicAdd( shaderSharedFloat64AtomicAdd_ ), shaderImageFloat32Atomics( shaderImageFloat32Atomics_ ), shaderImageFloat32AtomicAdd( shaderImageFloat32AtomicAdd_ ), sparseImageFloat32Atomics( sparseImageFloat32Atomics_ ), sparseImageFloat32AtomicAdd( sparseImageFloat32AtomicAdd_ )
  54561. {}
  54562. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicFloatFeaturesEXT( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54563. PhysicalDeviceShaderAtomicFloatFeaturesEXT( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  54564. : PhysicalDeviceShaderAtomicFloatFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs ) )
  54565. {}
  54566. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54567. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54568. PhysicalDeviceShaderAtomicFloatFeaturesEXT & operator=( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  54569. {
  54570. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT const *>( &rhs );
  54571. return *this;
  54572. }
  54573. PhysicalDeviceShaderAtomicFloatFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  54574. {
  54575. pNext = pNext_;
  54576. return *this;
  54577. }
  54578. PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
  54579. {
  54580. shaderBufferFloat32Atomics = shaderBufferFloat32Atomics_;
  54581. return *this;
  54582. }
  54583. PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
  54584. {
  54585. shaderBufferFloat32AtomicAdd = shaderBufferFloat32AtomicAdd_;
  54586. return *this;
  54587. }
  54588. PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
  54589. {
  54590. shaderBufferFloat64Atomics = shaderBufferFloat64Atomics_;
  54591. return *this;
  54592. }
  54593. PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderBufferFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
  54594. {
  54595. shaderBufferFloat64AtomicAdd = shaderBufferFloat64AtomicAdd_;
  54596. return *this;
  54597. }
  54598. PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
  54599. {
  54600. shaderSharedFloat32Atomics = shaderSharedFloat32Atomics_;
  54601. return *this;
  54602. }
  54603. PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
  54604. {
  54605. shaderSharedFloat32AtomicAdd = shaderSharedFloat32AtomicAdd_;
  54606. return *this;
  54607. }
  54608. PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics_ ) VULKAN_HPP_NOEXCEPT
  54609. {
  54610. shaderSharedFloat64Atomics = shaderSharedFloat64Atomics_;
  54611. return *this;
  54612. }
  54613. PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderSharedFloat64AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
  54614. {
  54615. shaderSharedFloat64AtomicAdd = shaderSharedFloat64AtomicAdd_;
  54616. return *this;
  54617. }
  54618. PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
  54619. {
  54620. shaderImageFloat32Atomics = shaderImageFloat32Atomics_;
  54621. return *this;
  54622. }
  54623. PhysicalDeviceShaderAtomicFloatFeaturesEXT & setShaderImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
  54624. {
  54625. shaderImageFloat32AtomicAdd = shaderImageFloat32AtomicAdd_;
  54626. return *this;
  54627. }
  54628. PhysicalDeviceShaderAtomicFloatFeaturesEXT & setSparseImageFloat32Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics_ ) VULKAN_HPP_NOEXCEPT
  54629. {
  54630. sparseImageFloat32Atomics = sparseImageFloat32Atomics_;
  54631. return *this;
  54632. }
  54633. PhysicalDeviceShaderAtomicFloatFeaturesEXT & setSparseImageFloat32AtomicAdd( VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd_ ) VULKAN_HPP_NOEXCEPT
  54634. {
  54635. sparseImageFloat32AtomicAdd = sparseImageFloat32AtomicAdd_;
  54636. return *this;
  54637. }
  54638. operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  54639. {
  54640. return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicFloatFeaturesEXT*>( this );
  54641. }
  54642. operator VkPhysicalDeviceShaderAtomicFloatFeaturesEXT &() VULKAN_HPP_NOEXCEPT
  54643. {
  54644. return *reinterpret_cast<VkPhysicalDeviceShaderAtomicFloatFeaturesEXT*>( this );
  54645. }
  54646. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  54647. auto operator<=>( PhysicalDeviceShaderAtomicFloatFeaturesEXT const& ) const = default;
  54648. #else
  54649. bool operator==( PhysicalDeviceShaderAtomicFloatFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  54650. {
  54651. return ( sType == rhs.sType )
  54652. && ( pNext == rhs.pNext )
  54653. && ( shaderBufferFloat32Atomics == rhs.shaderBufferFloat32Atomics )
  54654. && ( shaderBufferFloat32AtomicAdd == rhs.shaderBufferFloat32AtomicAdd )
  54655. && ( shaderBufferFloat64Atomics == rhs.shaderBufferFloat64Atomics )
  54656. && ( shaderBufferFloat64AtomicAdd == rhs.shaderBufferFloat64AtomicAdd )
  54657. && ( shaderSharedFloat32Atomics == rhs.shaderSharedFloat32Atomics )
  54658. && ( shaderSharedFloat32AtomicAdd == rhs.shaderSharedFloat32AtomicAdd )
  54659. && ( shaderSharedFloat64Atomics == rhs.shaderSharedFloat64Atomics )
  54660. && ( shaderSharedFloat64AtomicAdd == rhs.shaderSharedFloat64AtomicAdd )
  54661. && ( shaderImageFloat32Atomics == rhs.shaderImageFloat32Atomics )
  54662. && ( shaderImageFloat32AtomicAdd == rhs.shaderImageFloat32AtomicAdd )
  54663. && ( sparseImageFloat32Atomics == rhs.sparseImageFloat32Atomics )
  54664. && ( sparseImageFloat32AtomicAdd == rhs.sparseImageFloat32AtomicAdd );
  54665. }
  54666. bool operator!=( PhysicalDeviceShaderAtomicFloatFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  54667. {
  54668. return !operator==( rhs );
  54669. }
  54670. #endif
  54671. public:
  54672. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT;
  54673. void* pNext = {};
  54674. VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32Atomics = {};
  54675. VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat32AtomicAdd = {};
  54676. VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64Atomics = {};
  54677. VULKAN_HPP_NAMESPACE::Bool32 shaderBufferFloat64AtomicAdd = {};
  54678. VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32Atomics = {};
  54679. VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat32AtomicAdd = {};
  54680. VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64Atomics = {};
  54681. VULKAN_HPP_NAMESPACE::Bool32 shaderSharedFloat64AtomicAdd = {};
  54682. VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32Atomics = {};
  54683. VULKAN_HPP_NAMESPACE::Bool32 shaderImageFloat32AtomicAdd = {};
  54684. VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32Atomics = {};
  54685. VULKAN_HPP_NAMESPACE::Bool32 sparseImageFloat32AtomicAdd = {};
  54686. };
  54687. static_assert( sizeof( PhysicalDeviceShaderAtomicFloatFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT ), "struct and wrapper have different size!" );
  54688. static_assert( std::is_standard_layout<PhysicalDeviceShaderAtomicFloatFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  54689. template <>
  54690. struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicFloatFeaturesEXT>
  54691. {
  54692. using Type = PhysicalDeviceShaderAtomicFloatFeaturesEXT;
  54693. };
  54694. struct PhysicalDeviceShaderAtomicInt64Features
  54695. {
  54696. static const bool allowDuplicate = false;
  54697. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
  54698. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54699. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features(VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}) VULKAN_HPP_NOEXCEPT
  54700. : shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ), shaderSharedInt64Atomics( shaderSharedInt64Atomics_ )
  54701. {}
  54702. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderAtomicInt64Features( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54703. PhysicalDeviceShaderAtomicInt64Features( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
  54704. : PhysicalDeviceShaderAtomicInt64Features( *reinterpret_cast<PhysicalDeviceShaderAtomicInt64Features const *>( &rhs ) )
  54705. {}
  54706. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54707. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderAtomicInt64Features & operator=( PhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54708. PhysicalDeviceShaderAtomicInt64Features & operator=( VkPhysicalDeviceShaderAtomicInt64Features const & rhs ) VULKAN_HPP_NOEXCEPT
  54709. {
  54710. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features const *>( &rhs );
  54711. return *this;
  54712. }
  54713. PhysicalDeviceShaderAtomicInt64Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  54714. {
  54715. pNext = pNext_;
  54716. return *this;
  54717. }
  54718. PhysicalDeviceShaderAtomicInt64Features & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
  54719. {
  54720. shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
  54721. return *this;
  54722. }
  54723. PhysicalDeviceShaderAtomicInt64Features & setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
  54724. {
  54725. shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
  54726. return *this;
  54727. }
  54728. operator VkPhysicalDeviceShaderAtomicInt64Features const&() const VULKAN_HPP_NOEXCEPT
  54729. {
  54730. return *reinterpret_cast<const VkPhysicalDeviceShaderAtomicInt64Features*>( this );
  54731. }
  54732. operator VkPhysicalDeviceShaderAtomicInt64Features &() VULKAN_HPP_NOEXCEPT
  54733. {
  54734. return *reinterpret_cast<VkPhysicalDeviceShaderAtomicInt64Features*>( this );
  54735. }
  54736. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  54737. auto operator<=>( PhysicalDeviceShaderAtomicInt64Features const& ) const = default;
  54738. #else
  54739. bool operator==( PhysicalDeviceShaderAtomicInt64Features const& rhs ) const VULKAN_HPP_NOEXCEPT
  54740. {
  54741. return ( sType == rhs.sType )
  54742. && ( pNext == rhs.pNext )
  54743. && ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics )
  54744. && ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics );
  54745. }
  54746. bool operator!=( PhysicalDeviceShaderAtomicInt64Features const& rhs ) const VULKAN_HPP_NOEXCEPT
  54747. {
  54748. return !operator==( rhs );
  54749. }
  54750. #endif
  54751. public:
  54752. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderAtomicInt64Features;
  54753. void* pNext = {};
  54754. VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
  54755. VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
  54756. };
  54757. static_assert( sizeof( PhysicalDeviceShaderAtomicInt64Features ) == sizeof( VkPhysicalDeviceShaderAtomicInt64Features ), "struct and wrapper have different size!" );
  54758. static_assert( std::is_standard_layout<PhysicalDeviceShaderAtomicInt64Features>::value, "struct wrapper is not a standard layout!" );
  54759. template <>
  54760. struct CppType<StructureType, StructureType::ePhysicalDeviceShaderAtomicInt64Features>
  54761. {
  54762. using Type = PhysicalDeviceShaderAtomicInt64Features;
  54763. };
  54764. using PhysicalDeviceShaderAtomicInt64FeaturesKHR = PhysicalDeviceShaderAtomicInt64Features;
  54765. struct PhysicalDeviceShaderClockFeaturesKHR
  54766. {
  54767. static const bool allowDuplicate = false;
  54768. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
  54769. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54770. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ = {}) VULKAN_HPP_NOEXCEPT
  54771. : shaderSubgroupClock( shaderSubgroupClock_ ), shaderDeviceClock( shaderDeviceClock_ )
  54772. {}
  54773. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderClockFeaturesKHR( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54774. PhysicalDeviceShaderClockFeaturesKHR( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  54775. : PhysicalDeviceShaderClockFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs ) )
  54776. {}
  54777. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54778. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderClockFeaturesKHR & operator=( PhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54779. PhysicalDeviceShaderClockFeaturesKHR & operator=( VkPhysicalDeviceShaderClockFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  54780. {
  54781. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR const *>( &rhs );
  54782. return *this;
  54783. }
  54784. PhysicalDeviceShaderClockFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  54785. {
  54786. pNext = pNext_;
  54787. return *this;
  54788. }
  54789. PhysicalDeviceShaderClockFeaturesKHR & setShaderSubgroupClock( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock_ ) VULKAN_HPP_NOEXCEPT
  54790. {
  54791. shaderSubgroupClock = shaderSubgroupClock_;
  54792. return *this;
  54793. }
  54794. PhysicalDeviceShaderClockFeaturesKHR & setShaderDeviceClock( VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock_ ) VULKAN_HPP_NOEXCEPT
  54795. {
  54796. shaderDeviceClock = shaderDeviceClock_;
  54797. return *this;
  54798. }
  54799. operator VkPhysicalDeviceShaderClockFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
  54800. {
  54801. return *reinterpret_cast<const VkPhysicalDeviceShaderClockFeaturesKHR*>( this );
  54802. }
  54803. operator VkPhysicalDeviceShaderClockFeaturesKHR &() VULKAN_HPP_NOEXCEPT
  54804. {
  54805. return *reinterpret_cast<VkPhysicalDeviceShaderClockFeaturesKHR*>( this );
  54806. }
  54807. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  54808. auto operator<=>( PhysicalDeviceShaderClockFeaturesKHR const& ) const = default;
  54809. #else
  54810. bool operator==( PhysicalDeviceShaderClockFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  54811. {
  54812. return ( sType == rhs.sType )
  54813. && ( pNext == rhs.pNext )
  54814. && ( shaderSubgroupClock == rhs.shaderSubgroupClock )
  54815. && ( shaderDeviceClock == rhs.shaderDeviceClock );
  54816. }
  54817. bool operator!=( PhysicalDeviceShaderClockFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  54818. {
  54819. return !operator==( rhs );
  54820. }
  54821. #endif
  54822. public:
  54823. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderClockFeaturesKHR;
  54824. void* pNext = {};
  54825. VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupClock = {};
  54826. VULKAN_HPP_NAMESPACE::Bool32 shaderDeviceClock = {};
  54827. };
  54828. static_assert( sizeof( PhysicalDeviceShaderClockFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ), "struct and wrapper have different size!" );
  54829. static_assert( std::is_standard_layout<PhysicalDeviceShaderClockFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
  54830. template <>
  54831. struct CppType<StructureType, StructureType::ePhysicalDeviceShaderClockFeaturesKHR>
  54832. {
  54833. using Type = PhysicalDeviceShaderClockFeaturesKHR;
  54834. };
  54835. struct PhysicalDeviceShaderCoreProperties2AMD
  54836. {
  54837. static const bool allowDuplicate = false;
  54838. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
  54839. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54840. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD(VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures_ = {}, uint32_t activeComputeUnitCount_ = {}) VULKAN_HPP_NOEXCEPT
  54841. : shaderCoreFeatures( shaderCoreFeatures_ ), activeComputeUnitCount( activeComputeUnitCount_ )
  54842. {}
  54843. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCoreProperties2AMD( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54844. PhysicalDeviceShaderCoreProperties2AMD( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
  54845. : PhysicalDeviceShaderCoreProperties2AMD( *reinterpret_cast<PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs ) )
  54846. {}
  54847. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54848. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCoreProperties2AMD & operator=( PhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54849. PhysicalDeviceShaderCoreProperties2AMD & operator=( VkPhysicalDeviceShaderCoreProperties2AMD const & rhs ) VULKAN_HPP_NOEXCEPT
  54850. {
  54851. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD const *>( &rhs );
  54852. return *this;
  54853. }
  54854. operator VkPhysicalDeviceShaderCoreProperties2AMD const&() const VULKAN_HPP_NOEXCEPT
  54855. {
  54856. return *reinterpret_cast<const VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
  54857. }
  54858. operator VkPhysicalDeviceShaderCoreProperties2AMD &() VULKAN_HPP_NOEXCEPT
  54859. {
  54860. return *reinterpret_cast<VkPhysicalDeviceShaderCoreProperties2AMD*>( this );
  54861. }
  54862. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  54863. auto operator<=>( PhysicalDeviceShaderCoreProperties2AMD const& ) const = default;
  54864. #else
  54865. bool operator==( PhysicalDeviceShaderCoreProperties2AMD const& rhs ) const VULKAN_HPP_NOEXCEPT
  54866. {
  54867. return ( sType == rhs.sType )
  54868. && ( pNext == rhs.pNext )
  54869. && ( shaderCoreFeatures == rhs.shaderCoreFeatures )
  54870. && ( activeComputeUnitCount == rhs.activeComputeUnitCount );
  54871. }
  54872. bool operator!=( PhysicalDeviceShaderCoreProperties2AMD const& rhs ) const VULKAN_HPP_NOEXCEPT
  54873. {
  54874. return !operator==( rhs );
  54875. }
  54876. #endif
  54877. public:
  54878. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCoreProperties2AMD;
  54879. void* pNext = {};
  54880. VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD shaderCoreFeatures = {};
  54881. uint32_t activeComputeUnitCount = {};
  54882. };
  54883. static_assert( sizeof( PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ), "struct and wrapper have different size!" );
  54884. static_assert( std::is_standard_layout<PhysicalDeviceShaderCoreProperties2AMD>::value, "struct wrapper is not a standard layout!" );
  54885. template <>
  54886. struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCoreProperties2AMD>
  54887. {
  54888. using Type = PhysicalDeviceShaderCoreProperties2AMD;
  54889. };
  54890. struct PhysicalDeviceShaderCorePropertiesAMD
  54891. {
  54892. static const bool allowDuplicate = false;
  54893. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
  54894. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54895. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD(uint32_t shaderEngineCount_ = {}, uint32_t shaderArraysPerEngineCount_ = {}, uint32_t computeUnitsPerShaderArray_ = {}, uint32_t simdPerComputeUnit_ = {}, uint32_t wavefrontsPerSimd_ = {}, uint32_t wavefrontSize_ = {}, uint32_t sgprsPerSimd_ = {}, uint32_t minSgprAllocation_ = {}, uint32_t maxSgprAllocation_ = {}, uint32_t sgprAllocationGranularity_ = {}, uint32_t vgprsPerSimd_ = {}, uint32_t minVgprAllocation_ = {}, uint32_t maxVgprAllocation_ = {}, uint32_t vgprAllocationGranularity_ = {}) VULKAN_HPP_NOEXCEPT
  54896. : shaderEngineCount( shaderEngineCount_ ), shaderArraysPerEngineCount( shaderArraysPerEngineCount_ ), computeUnitsPerShaderArray( computeUnitsPerShaderArray_ ), simdPerComputeUnit( simdPerComputeUnit_ ), wavefrontsPerSimd( wavefrontsPerSimd_ ), wavefrontSize( wavefrontSize_ ), sgprsPerSimd( sgprsPerSimd_ ), minSgprAllocation( minSgprAllocation_ ), maxSgprAllocation( maxSgprAllocation_ ), sgprAllocationGranularity( sgprAllocationGranularity_ ), vgprsPerSimd( vgprsPerSimd_ ), minVgprAllocation( minVgprAllocation_ ), maxVgprAllocation( maxVgprAllocation_ ), vgprAllocationGranularity( vgprAllocationGranularity_ )
  54897. {}
  54898. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderCorePropertiesAMD( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54899. PhysicalDeviceShaderCorePropertiesAMD( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
  54900. : PhysicalDeviceShaderCorePropertiesAMD( *reinterpret_cast<PhysicalDeviceShaderCorePropertiesAMD const *>( &rhs ) )
  54901. {}
  54902. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54903. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderCorePropertiesAMD & operator=( PhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54904. PhysicalDeviceShaderCorePropertiesAMD & operator=( VkPhysicalDeviceShaderCorePropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
  54905. {
  54906. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD const *>( &rhs );
  54907. return *this;
  54908. }
  54909. operator VkPhysicalDeviceShaderCorePropertiesAMD const&() const VULKAN_HPP_NOEXCEPT
  54910. {
  54911. return *reinterpret_cast<const VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
  54912. }
  54913. operator VkPhysicalDeviceShaderCorePropertiesAMD &() VULKAN_HPP_NOEXCEPT
  54914. {
  54915. return *reinterpret_cast<VkPhysicalDeviceShaderCorePropertiesAMD*>( this );
  54916. }
  54917. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  54918. auto operator<=>( PhysicalDeviceShaderCorePropertiesAMD const& ) const = default;
  54919. #else
  54920. bool operator==( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
  54921. {
  54922. return ( sType == rhs.sType )
  54923. && ( pNext == rhs.pNext )
  54924. && ( shaderEngineCount == rhs.shaderEngineCount )
  54925. && ( shaderArraysPerEngineCount == rhs.shaderArraysPerEngineCount )
  54926. && ( computeUnitsPerShaderArray == rhs.computeUnitsPerShaderArray )
  54927. && ( simdPerComputeUnit == rhs.simdPerComputeUnit )
  54928. && ( wavefrontsPerSimd == rhs.wavefrontsPerSimd )
  54929. && ( wavefrontSize == rhs.wavefrontSize )
  54930. && ( sgprsPerSimd == rhs.sgprsPerSimd )
  54931. && ( minSgprAllocation == rhs.minSgprAllocation )
  54932. && ( maxSgprAllocation == rhs.maxSgprAllocation )
  54933. && ( sgprAllocationGranularity == rhs.sgprAllocationGranularity )
  54934. && ( vgprsPerSimd == rhs.vgprsPerSimd )
  54935. && ( minVgprAllocation == rhs.minVgprAllocation )
  54936. && ( maxVgprAllocation == rhs.maxVgprAllocation )
  54937. && ( vgprAllocationGranularity == rhs.vgprAllocationGranularity );
  54938. }
  54939. bool operator!=( PhysicalDeviceShaderCorePropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
  54940. {
  54941. return !operator==( rhs );
  54942. }
  54943. #endif
  54944. public:
  54945. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderCorePropertiesAMD;
  54946. void* pNext = {};
  54947. uint32_t shaderEngineCount = {};
  54948. uint32_t shaderArraysPerEngineCount = {};
  54949. uint32_t computeUnitsPerShaderArray = {};
  54950. uint32_t simdPerComputeUnit = {};
  54951. uint32_t wavefrontsPerSimd = {};
  54952. uint32_t wavefrontSize = {};
  54953. uint32_t sgprsPerSimd = {};
  54954. uint32_t minSgprAllocation = {};
  54955. uint32_t maxSgprAllocation = {};
  54956. uint32_t sgprAllocationGranularity = {};
  54957. uint32_t vgprsPerSimd = {};
  54958. uint32_t minVgprAllocation = {};
  54959. uint32_t maxVgprAllocation = {};
  54960. uint32_t vgprAllocationGranularity = {};
  54961. };
  54962. static_assert( sizeof( PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ), "struct and wrapper have different size!" );
  54963. static_assert( std::is_standard_layout<PhysicalDeviceShaderCorePropertiesAMD>::value, "struct wrapper is not a standard layout!" );
  54964. template <>
  54965. struct CppType<StructureType, StructureType::ePhysicalDeviceShaderCorePropertiesAMD>
  54966. {
  54967. using Type = PhysicalDeviceShaderCorePropertiesAMD;
  54968. };
  54969. struct PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT
  54970. {
  54971. static const bool allowDuplicate = false;
  54972. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
  54973. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54974. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ = {}) VULKAN_HPP_NOEXCEPT
  54975. : shaderDemoteToHelperInvocation( shaderDemoteToHelperInvocation_ )
  54976. {}
  54977. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54978. PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  54979. : PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const *>( &rhs ) )
  54980. {}
  54981. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  54982. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & operator=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  54983. PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & operator=( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  54984. {
  54985. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const *>( &rhs );
  54986. return *this;
  54987. }
  54988. PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  54989. {
  54990. pNext = pNext_;
  54991. return *this;
  54992. }
  54993. PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT & setShaderDemoteToHelperInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation_ ) VULKAN_HPP_NOEXCEPT
  54994. {
  54995. shaderDemoteToHelperInvocation = shaderDemoteToHelperInvocation_;
  54996. return *this;
  54997. }
  54998. operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  54999. {
  55000. return *reinterpret_cast<const VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT*>( this );
  55001. }
  55002. operator VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
  55003. {
  55004. return *reinterpret_cast<VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT*>( this );
  55005. }
  55006. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  55007. auto operator<=>( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& ) const = default;
  55008. #else
  55009. bool operator==( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  55010. {
  55011. return ( sType == rhs.sType )
  55012. && ( pNext == rhs.pNext )
  55013. && ( shaderDemoteToHelperInvocation == rhs.shaderDemoteToHelperInvocation );
  55014. }
  55015. bool operator!=( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  55016. {
  55017. return !operator==( rhs );
  55018. }
  55019. #endif
  55020. public:
  55021. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
  55022. void* pNext = {};
  55023. VULKAN_HPP_NAMESPACE::Bool32 shaderDemoteToHelperInvocation = {};
  55024. };
  55025. static_assert( sizeof( PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT ), "struct and wrapper have different size!" );
  55026. static_assert( std::is_standard_layout<PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  55027. template <>
  55028. struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT>
  55029. {
  55030. using Type = PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
  55031. };
  55032. struct PhysicalDeviceShaderDrawParametersFeatures
  55033. {
  55034. static const bool allowDuplicate = false;
  55035. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
  55036. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  55037. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}) VULKAN_HPP_NOEXCEPT
  55038. : shaderDrawParameters( shaderDrawParameters_ )
  55039. {}
  55040. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderDrawParametersFeatures( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  55041. PhysicalDeviceShaderDrawParametersFeatures( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  55042. : PhysicalDeviceShaderDrawParametersFeatures( *reinterpret_cast<PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs ) )
  55043. {}
  55044. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  55045. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderDrawParametersFeatures & operator=( PhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  55046. PhysicalDeviceShaderDrawParametersFeatures & operator=( VkPhysicalDeviceShaderDrawParametersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  55047. {
  55048. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures const *>( &rhs );
  55049. return *this;
  55050. }
  55051. PhysicalDeviceShaderDrawParametersFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  55052. {
  55053. pNext = pNext_;
  55054. return *this;
  55055. }
  55056. PhysicalDeviceShaderDrawParametersFeatures & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
  55057. {
  55058. shaderDrawParameters = shaderDrawParameters_;
  55059. return *this;
  55060. }
  55061. operator VkPhysicalDeviceShaderDrawParametersFeatures const&() const VULKAN_HPP_NOEXCEPT
  55062. {
  55063. return *reinterpret_cast<const VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
  55064. }
  55065. operator VkPhysicalDeviceShaderDrawParametersFeatures &() VULKAN_HPP_NOEXCEPT
  55066. {
  55067. return *reinterpret_cast<VkPhysicalDeviceShaderDrawParametersFeatures*>( this );
  55068. }
  55069. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  55070. auto operator<=>( PhysicalDeviceShaderDrawParametersFeatures const& ) const = default;
  55071. #else
  55072. bool operator==( PhysicalDeviceShaderDrawParametersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  55073. {
  55074. return ( sType == rhs.sType )
  55075. && ( pNext == rhs.pNext )
  55076. && ( shaderDrawParameters == rhs.shaderDrawParameters );
  55077. }
  55078. bool operator!=( PhysicalDeviceShaderDrawParametersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  55079. {
  55080. return !operator==( rhs );
  55081. }
  55082. #endif
  55083. public:
  55084. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderDrawParametersFeatures;
  55085. void* pNext = {};
  55086. VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
  55087. };
  55088. static_assert( sizeof( PhysicalDeviceShaderDrawParametersFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ), "struct and wrapper have different size!" );
  55089. static_assert( std::is_standard_layout<PhysicalDeviceShaderDrawParametersFeatures>::value, "struct wrapper is not a standard layout!" );
  55090. template <>
  55091. struct CppType<StructureType, StructureType::ePhysicalDeviceShaderDrawParametersFeatures>
  55092. {
  55093. using Type = PhysicalDeviceShaderDrawParametersFeatures;
  55094. };
  55095. using PhysicalDeviceShaderDrawParameterFeatures = PhysicalDeviceShaderDrawParametersFeatures;
  55096. struct PhysicalDeviceShaderFloat16Int8Features
  55097. {
  55098. static const bool allowDuplicate = false;
  55099. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
  55100. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  55101. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features(VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}) VULKAN_HPP_NOEXCEPT
  55102. : shaderFloat16( shaderFloat16_ ), shaderInt8( shaderInt8_ )
  55103. {}
  55104. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderFloat16Int8Features( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  55105. PhysicalDeviceShaderFloat16Int8Features( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
  55106. : PhysicalDeviceShaderFloat16Int8Features( *reinterpret_cast<PhysicalDeviceShaderFloat16Int8Features const *>( &rhs ) )
  55107. {}
  55108. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  55109. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderFloat16Int8Features & operator=( PhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  55110. PhysicalDeviceShaderFloat16Int8Features & operator=( VkPhysicalDeviceShaderFloat16Int8Features const & rhs ) VULKAN_HPP_NOEXCEPT
  55111. {
  55112. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features const *>( &rhs );
  55113. return *this;
  55114. }
  55115. PhysicalDeviceShaderFloat16Int8Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  55116. {
  55117. pNext = pNext_;
  55118. return *this;
  55119. }
  55120. PhysicalDeviceShaderFloat16Int8Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
  55121. {
  55122. shaderFloat16 = shaderFloat16_;
  55123. return *this;
  55124. }
  55125. PhysicalDeviceShaderFloat16Int8Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
  55126. {
  55127. shaderInt8 = shaderInt8_;
  55128. return *this;
  55129. }
  55130. operator VkPhysicalDeviceShaderFloat16Int8Features const&() const VULKAN_HPP_NOEXCEPT
  55131. {
  55132. return *reinterpret_cast<const VkPhysicalDeviceShaderFloat16Int8Features*>( this );
  55133. }
  55134. operator VkPhysicalDeviceShaderFloat16Int8Features &() VULKAN_HPP_NOEXCEPT
  55135. {
  55136. return *reinterpret_cast<VkPhysicalDeviceShaderFloat16Int8Features*>( this );
  55137. }
  55138. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  55139. auto operator<=>( PhysicalDeviceShaderFloat16Int8Features const& ) const = default;
  55140. #else
  55141. bool operator==( PhysicalDeviceShaderFloat16Int8Features const& rhs ) const VULKAN_HPP_NOEXCEPT
  55142. {
  55143. return ( sType == rhs.sType )
  55144. && ( pNext == rhs.pNext )
  55145. && ( shaderFloat16 == rhs.shaderFloat16 )
  55146. && ( shaderInt8 == rhs.shaderInt8 );
  55147. }
  55148. bool operator!=( PhysicalDeviceShaderFloat16Int8Features const& rhs ) const VULKAN_HPP_NOEXCEPT
  55149. {
  55150. return !operator==( rhs );
  55151. }
  55152. #endif
  55153. public:
  55154. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderFloat16Int8Features;
  55155. void* pNext = {};
  55156. VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
  55157. VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};
  55158. };
  55159. static_assert( sizeof( PhysicalDeviceShaderFloat16Int8Features ) == sizeof( VkPhysicalDeviceShaderFloat16Int8Features ), "struct and wrapper have different size!" );
  55160. static_assert( std::is_standard_layout<PhysicalDeviceShaderFloat16Int8Features>::value, "struct wrapper is not a standard layout!" );
  55161. template <>
  55162. struct CppType<StructureType, StructureType::ePhysicalDeviceShaderFloat16Int8Features>
  55163. {
  55164. using Type = PhysicalDeviceShaderFloat16Int8Features;
  55165. };
  55166. using PhysicalDeviceFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
  55167. using PhysicalDeviceShaderFloat16Int8FeaturesKHR = PhysicalDeviceShaderFloat16Int8Features;
  55168. struct PhysicalDeviceShaderImageAtomicInt64FeaturesEXT
  55169. {
  55170. static const bool allowDuplicate = false;
  55171. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
  55172. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  55173. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ = {}) VULKAN_HPP_NOEXCEPT
  55174. : shaderImageInt64Atomics( shaderImageInt64Atomics_ ), sparseImageInt64Atomics( sparseImageInt64Atomics_ )
  55175. {}
  55176. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  55177. PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  55178. : PhysicalDeviceShaderImageAtomicInt64FeaturesEXT( *reinterpret_cast<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs ) )
  55179. {}
  55180. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  55181. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  55182. PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & operator=( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  55183. {
  55184. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const *>( &rhs );
  55185. return *this;
  55186. }
  55187. PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  55188. {
  55189. pNext = pNext_;
  55190. return *this;
  55191. }
  55192. PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setShaderImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
  55193. {
  55194. shaderImageInt64Atomics = shaderImageInt64Atomics_;
  55195. return *this;
  55196. }
  55197. PhysicalDeviceShaderImageAtomicInt64FeaturesEXT & setSparseImageInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
  55198. {
  55199. sparseImageInt64Atomics = sparseImageInt64Atomics_;
  55200. return *this;
  55201. }
  55202. operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  55203. {
  55204. return *reinterpret_cast<const VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT*>( this );
  55205. }
  55206. operator VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT &() VULKAN_HPP_NOEXCEPT
  55207. {
  55208. return *reinterpret_cast<VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT*>( this );
  55209. }
  55210. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  55211. auto operator<=>( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const& ) const = default;
  55212. #else
  55213. bool operator==( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  55214. {
  55215. return ( sType == rhs.sType )
  55216. && ( pNext == rhs.pNext )
  55217. && ( shaderImageInt64Atomics == rhs.shaderImageInt64Atomics )
  55218. && ( sparseImageInt64Atomics == rhs.sparseImageInt64Atomics );
  55219. }
  55220. bool operator!=( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  55221. {
  55222. return !operator==( rhs );
  55223. }
  55224. #endif
  55225. public:
  55226. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
  55227. void* pNext = {};
  55228. VULKAN_HPP_NAMESPACE::Bool32 shaderImageInt64Atomics = {};
  55229. VULKAN_HPP_NAMESPACE::Bool32 sparseImageInt64Atomics = {};
  55230. };
  55231. static_assert( sizeof( PhysicalDeviceShaderImageAtomicInt64FeaturesEXT ) == sizeof( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT ), "struct and wrapper have different size!" );
  55232. static_assert( std::is_standard_layout<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  55233. template <>
  55234. struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT>
  55235. {
  55236. using Type = PhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
  55237. };
  55238. struct PhysicalDeviceShaderImageFootprintFeaturesNV
  55239. {
  55240. static const bool allowDuplicate = false;
  55241. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
  55242. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  55243. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ = {}) VULKAN_HPP_NOEXCEPT
  55244. : imageFootprint( imageFootprint_ )
  55245. {}
  55246. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderImageFootprintFeaturesNV( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  55247. PhysicalDeviceShaderImageFootprintFeaturesNV( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  55248. : PhysicalDeviceShaderImageFootprintFeaturesNV( *reinterpret_cast<PhysicalDeviceShaderImageFootprintFeaturesNV const *>( &rhs ) )
  55249. {}
  55250. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  55251. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( PhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  55252. PhysicalDeviceShaderImageFootprintFeaturesNV & operator=( VkPhysicalDeviceShaderImageFootprintFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  55253. {
  55254. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV const *>( &rhs );
  55255. return *this;
  55256. }
  55257. PhysicalDeviceShaderImageFootprintFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  55258. {
  55259. pNext = pNext_;
  55260. return *this;
  55261. }
  55262. PhysicalDeviceShaderImageFootprintFeaturesNV & setImageFootprint( VULKAN_HPP_NAMESPACE::Bool32 imageFootprint_ ) VULKAN_HPP_NOEXCEPT
  55263. {
  55264. imageFootprint = imageFootprint_;
  55265. return *this;
  55266. }
  55267. operator VkPhysicalDeviceShaderImageFootprintFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
  55268. {
  55269. return *reinterpret_cast<const VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
  55270. }
  55271. operator VkPhysicalDeviceShaderImageFootprintFeaturesNV &() VULKAN_HPP_NOEXCEPT
  55272. {
  55273. return *reinterpret_cast<VkPhysicalDeviceShaderImageFootprintFeaturesNV*>( this );
  55274. }
  55275. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  55276. auto operator<=>( PhysicalDeviceShaderImageFootprintFeaturesNV const& ) const = default;
  55277. #else
  55278. bool operator==( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  55279. {
  55280. return ( sType == rhs.sType )
  55281. && ( pNext == rhs.pNext )
  55282. && ( imageFootprint == rhs.imageFootprint );
  55283. }
  55284. bool operator!=( PhysicalDeviceShaderImageFootprintFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  55285. {
  55286. return !operator==( rhs );
  55287. }
  55288. #endif
  55289. public:
  55290. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV;
  55291. void* pNext = {};
  55292. VULKAN_HPP_NAMESPACE::Bool32 imageFootprint = {};
  55293. };
  55294. static_assert( sizeof( PhysicalDeviceShaderImageFootprintFeaturesNV ) == sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ), "struct and wrapper have different size!" );
  55295. static_assert( std::is_standard_layout<PhysicalDeviceShaderImageFootprintFeaturesNV>::value, "struct wrapper is not a standard layout!" );
  55296. template <>
  55297. struct CppType<StructureType, StructureType::ePhysicalDeviceShaderImageFootprintFeaturesNV>
  55298. {
  55299. using Type = PhysicalDeviceShaderImageFootprintFeaturesNV;
  55300. };
  55301. struct PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL
  55302. {
  55303. static const bool allowDuplicate = false;
  55304. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
  55305. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  55306. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL(VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ = {}) VULKAN_HPP_NOEXCEPT
  55307. : shaderIntegerFunctions2( shaderIntegerFunctions2_ )
  55308. {}
  55309. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  55310. PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
  55311. : PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL( *reinterpret_cast<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs ) )
  55312. {}
  55313. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  55314. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  55315. PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & operator=( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
  55316. {
  55317. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const *>( &rhs );
  55318. return *this;
  55319. }
  55320. PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  55321. {
  55322. pNext = pNext_;
  55323. return *this;
  55324. }
  55325. PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL & setShaderIntegerFunctions2( VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2_ ) VULKAN_HPP_NOEXCEPT
  55326. {
  55327. shaderIntegerFunctions2 = shaderIntegerFunctions2_;
  55328. return *this;
  55329. }
  55330. operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const&() const VULKAN_HPP_NOEXCEPT
  55331. {
  55332. return *reinterpret_cast<const VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
  55333. }
  55334. operator VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL &() VULKAN_HPP_NOEXCEPT
  55335. {
  55336. return *reinterpret_cast<VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL*>( this );
  55337. }
  55338. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  55339. auto operator<=>( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& ) const = default;
  55340. #else
  55341. bool operator==( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
  55342. {
  55343. return ( sType == rhs.sType )
  55344. && ( pNext == rhs.pNext )
  55345. && ( shaderIntegerFunctions2 == rhs.shaderIntegerFunctions2 );
  55346. }
  55347. bool operator!=( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
  55348. {
  55349. return !operator==( rhs );
  55350. }
  55351. #endif
  55352. public:
  55353. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
  55354. void* pNext = {};
  55355. VULKAN_HPP_NAMESPACE::Bool32 shaderIntegerFunctions2 = {};
  55356. };
  55357. static_assert( sizeof( PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) == sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ), "struct and wrapper have different size!" );
  55358. static_assert( std::is_standard_layout<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value, "struct wrapper is not a standard layout!" );
  55359. template <>
  55360. struct CppType<StructureType, StructureType::ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>
  55361. {
  55362. using Type = PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
  55363. };
  55364. struct PhysicalDeviceShaderSMBuiltinsFeaturesNV
  55365. {
  55366. static const bool allowDuplicate = false;
  55367. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
  55368. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  55369. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ = {}) VULKAN_HPP_NOEXCEPT
  55370. : shaderSMBuiltins( shaderSMBuiltins_ )
  55371. {}
  55372. VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsFeaturesNV( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  55373. PhysicalDeviceShaderSMBuiltinsFeaturesNV( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  55374. : PhysicalDeviceShaderSMBuiltinsFeaturesNV( *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs ) )
  55375. {}
  55376. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  55377. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  55378. PhysicalDeviceShaderSMBuiltinsFeaturesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  55379. {
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV const *>( &rhs );
return *this;
}
PhysicalDeviceShaderSMBuiltinsFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceShaderSMBuiltinsFeaturesNV & setShaderSMBuiltins( VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins_ ) VULKAN_HPP_NOEXCEPT
{
shaderSMBuiltins = shaderSMBuiltins_;
return *this;
}
operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>( this );
}
operator VkPhysicalDeviceShaderSMBuiltinsFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsFeaturesNV*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& ) const = default;
#else
bool operator==( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderSMBuiltins == rhs.shaderSMBuiltins );
}
bool operator!=( PhysicalDeviceShaderSMBuiltinsFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSMBuiltins = {};
};
static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsFeaturesNV>
{
using Type = PhysicalDeviceShaderSMBuiltinsFeaturesNV;
};
struct PhysicalDeviceShaderSMBuiltinsPropertiesNV
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV(uint32_t shaderSMCount_ = {}, uint32_t shaderWarpsPerSM_ = {}) VULKAN_HPP_NOEXCEPT
: shaderSMCount( shaderSMCount_ ), shaderWarpsPerSM( shaderWarpsPerSM_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSMBuiltinsPropertiesNV( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderSMBuiltinsPropertiesNV( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderSMBuiltinsPropertiesNV( *reinterpret_cast<PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderSMBuiltinsPropertiesNV & operator=( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>( this );
}
operator VkPhysicalDeviceShaderSMBuiltinsPropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderSMBuiltinsPropertiesNV*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& ) const = default;
#else
bool operator==( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderSMCount == rhs.shaderSMCount )
&& ( shaderWarpsPerSM == rhs.shaderWarpsPerSM );
}
bool operator!=( PhysicalDeviceShaderSMBuiltinsPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV;
void* pNext = {};
uint32_t shaderSMCount = {};
uint32_t shaderWarpsPerSM = {};
};
static_assert( sizeof( PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderSMBuiltinsPropertiesNV>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSmBuiltinsPropertiesNV>
{
using Type = PhysicalDeviceShaderSMBuiltinsPropertiesNV;
};
struct PhysicalDeviceShaderSubgroupExtendedTypesFeatures
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures(VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}) VULKAN_HPP_NOEXCEPT
: shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderSubgroupExtendedTypesFeatures( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderSubgroupExtendedTypesFeatures( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderSubgroupExtendedTypesFeatures( *reinterpret_cast<PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderSubgroupExtendedTypesFeatures & operator=( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures const *>( &rhs );
return *this;
}
PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceShaderSubgroupExtendedTypesFeatures & setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
{
shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
return *this;
}
operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures*>( this );
}
operator VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const& ) const = default;
#else
bool operator==( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes );
}
bool operator!=( PhysicalDeviceShaderSubgroupExtendedTypesFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {};
};
static_assert( sizeof( PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) == sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderSubgroupExtendedTypesFeatures>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderSubgroupExtendedTypesFeatures>
{
using Type = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
};
using PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
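// NOTE (illustrative sketch, not part of the generated API): feature structs such as the one
// above are normally filled in by chaining them behind a vk::PhysicalDeviceFeatures2 query.
// A minimal sketch, assuming a Vulkan 1.1 capable vk::PhysicalDevice handle named `gpu`:
//
//   vk::PhysicalDeviceShaderSubgroupExtendedTypesFeatures extendedTypesFeatures;
//   vk::PhysicalDeviceFeatures2 features2;
//   features2.pNext = &extendedTypesFeatures;   // hook the extension struct into the pNext chain
//   gpu.getFeatures2( &features2 );             // the driver writes the feature flags back
//   bool supported = ( extendedTypesFeatures.shaderSubgroupExtendedTypes == VK_TRUE );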
struct PhysicalDeviceShaderTerminateInvocationFeaturesKHR
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeaturesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ = {}) VULKAN_HPP_NOEXCEPT
: shaderTerminateInvocation( shaderTerminateInvocation_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShaderTerminateInvocationFeaturesKHR( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderTerminateInvocationFeaturesKHR( VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShaderTerminateInvocationFeaturesKHR( *reinterpret_cast<PhysicalDeviceShaderTerminateInvocationFeaturesKHR const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShaderTerminateInvocationFeaturesKHR & operator=( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShaderTerminateInvocationFeaturesKHR & operator=( VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeaturesKHR const *>( &rhs );
return *this;
}
PhysicalDeviceShaderTerminateInvocationFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceShaderTerminateInvocationFeaturesKHR & setShaderTerminateInvocation( VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation_ ) VULKAN_HPP_NOEXCEPT
{
shaderTerminateInvocation = shaderTerminateInvocation_;
return *this;
}
operator VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR*>( this );
}
operator VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const& ) const = default;
#else
bool operator==( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shaderTerminateInvocation == rhs.shaderTerminateInvocation );
}
bool operator!=( PhysicalDeviceShaderTerminateInvocationFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShaderTerminateInvocationFeaturesKHR;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderTerminateInvocation = {};
};
static_assert( sizeof( PhysicalDeviceShaderTerminateInvocationFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShaderTerminateInvocationFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShaderTerminateInvocationFeaturesKHR>
{
using Type = PhysicalDeviceShaderTerminateInvocationFeaturesKHR;
};
struct PhysicalDeviceShadingRateImageFeaturesNV
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ = {}) VULKAN_HPP_NOEXCEPT
: shadingRateImage( shadingRateImage_ ), shadingRateCoarseSampleOrder( shadingRateCoarseSampleOrder_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImageFeaturesNV( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShadingRateImageFeaturesNV( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShadingRateImageFeaturesNV( *reinterpret_cast<PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImageFeaturesNV & operator=( PhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShadingRateImageFeaturesNV & operator=( VkPhysicalDeviceShadingRateImageFeaturesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV const *>( &rhs );
return *this;
}
PhysicalDeviceShadingRateImageFeaturesNV & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateImage( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage_ ) VULKAN_HPP_NOEXCEPT
{
shadingRateImage = shadingRateImage_;
return *this;
}
PhysicalDeviceShadingRateImageFeaturesNV & setShadingRateCoarseSampleOrder( VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder_ ) VULKAN_HPP_NOEXCEPT
{
shadingRateCoarseSampleOrder = shadingRateCoarseSampleOrder_;
return *this;
}
operator VkPhysicalDeviceShadingRateImageFeaturesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShadingRateImageFeaturesNV*>( this );
}
operator VkPhysicalDeviceShadingRateImageFeaturesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShadingRateImageFeaturesNV*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShadingRateImageFeaturesNV const& ) const = default;
#else
bool operator==( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shadingRateImage == rhs.shadingRateImage )
&& ( shadingRateCoarseSampleOrder == rhs.shadingRateCoarseSampleOrder );
}
bool operator!=( PhysicalDeviceShadingRateImageFeaturesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImageFeaturesNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 shadingRateImage = {};
VULKAN_HPP_NAMESPACE::Bool32 shadingRateCoarseSampleOrder = {};
};
static_assert( sizeof( PhysicalDeviceShadingRateImageFeaturesNV ) == sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShadingRateImageFeaturesNV>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImageFeaturesNV>
{
using Type = PhysicalDeviceShadingRateImageFeaturesNV;
};
struct PhysicalDeviceShadingRateImagePropertiesNV
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV(VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize_ = {}, uint32_t shadingRatePaletteSize_ = {}, uint32_t shadingRateMaxCoarseSamples_ = {}) VULKAN_HPP_NOEXCEPT
: shadingRateTexelSize( shadingRateTexelSize_ ), shadingRatePaletteSize( shadingRatePaletteSize_ ), shadingRateMaxCoarseSamples( shadingRateMaxCoarseSamples_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceShadingRateImagePropertiesNV( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShadingRateImagePropertiesNV( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceShadingRateImagePropertiesNV( *reinterpret_cast<PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceShadingRateImagePropertiesNV & operator=( PhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceShadingRateImagePropertiesNV & operator=( VkPhysicalDeviceShadingRateImagePropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceShadingRateImagePropertiesNV const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceShadingRateImagePropertiesNV*>( this );
}
operator VkPhysicalDeviceShadingRateImagePropertiesNV &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceShadingRateImagePropertiesNV*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceShadingRateImagePropertiesNV const& ) const = default;
#else
bool operator==( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( shadingRateTexelSize == rhs.shadingRateTexelSize )
&& ( shadingRatePaletteSize == rhs.shadingRatePaletteSize )
&& ( shadingRateMaxCoarseSamples == rhs.shadingRateMaxCoarseSamples );
}
bool operator!=( PhysicalDeviceShadingRateImagePropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceShadingRateImagePropertiesNV;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Extent2D shadingRateTexelSize = {};
uint32_t shadingRatePaletteSize = {};
uint32_t shadingRateMaxCoarseSamples = {};
};
static_assert( sizeof( PhysicalDeviceShadingRateImagePropertiesNV ) == sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceShadingRateImagePropertiesNV>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceShadingRateImagePropertiesNV>
{
using Type = PhysicalDeviceShadingRateImagePropertiesNV;
};
struct PhysicalDeviceSubgroupProperties
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties(uint32_t subgroupSize_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages_ = {}, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages_ = {}) VULKAN_HPP_NOEXCEPT
: subgroupSize( subgroupSize_ ), supportedStages( supportedStages_ ), supportedOperations( supportedOperations_ ), quadOperationsInAllStages( quadOperationsInAllStages_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupProperties( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSubgroupProperties( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSubgroupProperties( *reinterpret_cast<PhysicalDeviceSubgroupProperties const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupProperties & operator=( PhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSubgroupProperties & operator=( VkPhysicalDeviceSubgroupProperties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceSubgroupProperties const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSubgroupProperties*>( this );
}
operator VkPhysicalDeviceSubgroupProperties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSubgroupProperties*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSubgroupProperties const& ) const = default;
#else
bool operator==( PhysicalDeviceSubgroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( subgroupSize == rhs.subgroupSize )
&& ( supportedStages == rhs.supportedStages )
&& ( supportedOperations == rhs.supportedOperations )
&& ( quadOperationsInAllStages == rhs.quadOperationsInAllStages );
}
bool operator!=( PhysicalDeviceSubgroupProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupProperties;
void* pNext = {};
uint32_t subgroupSize = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags supportedStages = {};
VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags supportedOperations = {};
VULKAN_HPP_NAMESPACE::Bool32 quadOperationsInAllStages = {};
};
static_assert( sizeof( PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceSubgroupProperties>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupProperties>
{
using Type = PhysicalDeviceSubgroupProperties;
};
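// NOTE (illustrative sketch, not part of the generated API): property structs such as
// PhysicalDeviceSubgroupProperties are read back through vk::PhysicalDeviceProperties2.
// A minimal sketch, assuming a Vulkan 1.1 capable vk::PhysicalDevice handle named `gpu`:
//
//   vk::PhysicalDeviceSubgroupProperties subgroupProps;
//   vk::PhysicalDeviceProperties2 props2;
//   props2.pNext = &subgroupProps;
//   gpu.getProperties2( &props2 );
//   uint32_t lanes = subgroupProps.subgroupSize;   // e.g. 32 on many desktop GPUs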
struct PhysicalDeviceSubgroupSizeControlFeaturesEXT
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ = {}, VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ = {}) VULKAN_HPP_NOEXCEPT
: subgroupSizeControl( subgroupSizeControl_ ), computeFullSubgroups( computeFullSubgroups_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlFeaturesEXT( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSubgroupSizeControlFeaturesEXT( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSubgroupSizeControlFeaturesEXT( *reinterpret_cast<PhysicalDeviceSubgroupSizeControlFeaturesEXT const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlFeaturesEXT & operator=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSubgroupSizeControlFeaturesEXT & operator=( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT const *>( &rhs );
return *this;
}
PhysicalDeviceSubgroupSizeControlFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceSubgroupSizeControlFeaturesEXT & setSubgroupSizeControl( VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl_ ) VULKAN_HPP_NOEXCEPT
{
subgroupSizeControl = subgroupSizeControl_;
return *this;
}
PhysicalDeviceSubgroupSizeControlFeaturesEXT & setComputeFullSubgroups( VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups_ ) VULKAN_HPP_NOEXCEPT
{
computeFullSubgroups = computeFullSubgroups_;
return *this;
}
operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlFeaturesEXT*>( this );
}
operator VkPhysicalDeviceSubgroupSizeControlFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlFeaturesEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& ) const = default;
#else
bool operator==( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( subgroupSizeControl == rhs.subgroupSizeControl )
&& ( computeFullSubgroups == rhs.computeFullSubgroups );
}
bool operator!=( PhysicalDeviceSubgroupSizeControlFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 subgroupSizeControl = {};
VULKAN_HPP_NAMESPACE::Bool32 computeFullSubgroups = {};
};
static_assert( sizeof( PhysicalDeviceSubgroupSizeControlFeaturesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceSubgroupSizeControlFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlFeaturesEXT>
{
using Type = PhysicalDeviceSubgroupSizeControlFeaturesEXT;
};
struct PhysicalDeviceSubgroupSizeControlPropertiesEXT
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT(uint32_t minSubgroupSize_ = {}, uint32_t maxSubgroupSize_ = {}, uint32_t maxComputeWorkgroupSubgroups_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages_ = {}) VULKAN_HPP_NOEXCEPT
: minSubgroupSize( minSubgroupSize_ ), maxSubgroupSize( maxSubgroupSize_ ), maxComputeWorkgroupSubgroups( maxComputeWorkgroupSubgroups_ ), requiredSubgroupSizeStages( requiredSubgroupSizeStages_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceSubgroupSizeControlPropertiesEXT( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSubgroupSizeControlPropertiesEXT( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceSubgroupSizeControlPropertiesEXT( *reinterpret_cast<PhysicalDeviceSubgroupSizeControlPropertiesEXT const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceSubgroupSizeControlPropertiesEXT & operator=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceSubgroupSizeControlPropertiesEXT & operator=( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceSubgroupSizeControlPropertiesEXT*>( this );
}
operator VkPhysicalDeviceSubgroupSizeControlPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceSubgroupSizeControlPropertiesEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& ) const = default;
#else
bool operator==( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( minSubgroupSize == rhs.minSubgroupSize )
&& ( maxSubgroupSize == rhs.maxSubgroupSize )
&& ( maxComputeWorkgroupSubgroups == rhs.maxComputeWorkgroupSubgroups )
&& ( requiredSubgroupSizeStages == rhs.requiredSubgroupSizeStages );
}
bool operator!=( PhysicalDeviceSubgroupSizeControlPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT;
void* pNext = {};
uint32_t minSubgroupSize = {};
uint32_t maxSubgroupSize = {};
uint32_t maxComputeWorkgroupSubgroups = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags requiredSubgroupSizeStages = {};
};
static_assert( sizeof( PhysicalDeviceSubgroupSizeControlPropertiesEXT ) == sizeof( VkPhysicalDeviceSubgroupSizeControlPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceSubgroupSizeControlPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceSubgroupSizeControlPropertiesEXT>
{
using Type = PhysicalDeviceSubgroupSizeControlPropertiesEXT;
};
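// NOTE (illustrative sketch, not part of the generated API): when the StructureChain-returning
// overload of getProperties2 provided by this header is available in the build, several
// extension structs can be queried in one call. A sketch, assuming a vk::PhysicalDevice
// handle named `gpu`:
//
//   auto chain = gpu.getProperties2<vk::PhysicalDeviceProperties2,
//                                   vk::PhysicalDeviceSubgroupSizeControlPropertiesEXT>();
//   auto const & sizeControl = chain.get<vk::PhysicalDeviceSubgroupSizeControlPropertiesEXT>();
//   // sizeControl.minSubgroupSize / maxSubgroupSize bound what
//   // VK_EXT_subgroup_size_control lets a pipeline request.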
struct PhysicalDeviceTexelBufferAlignmentFeaturesEXT
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ = {}) VULKAN_HPP_NOEXCEPT
: texelBufferAlignment( texelBufferAlignment_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentFeaturesEXT( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTexelBufferAlignmentFeaturesEXT( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceTexelBufferAlignmentFeaturesEXT( *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTexelBufferAlignmentFeaturesEXT & operator=( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT const *>( &rhs );
return *this;
}
PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceTexelBufferAlignmentFeaturesEXT & setTexelBufferAlignment( VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment_ ) VULKAN_HPP_NOEXCEPT
{
texelBufferAlignment = texelBufferAlignment_;
return *this;
}
operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
}
operator VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& ) const = default;
#else
bool operator==( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( texelBufferAlignment == rhs.texelBufferAlignment );
}
bool operator!=( PhysicalDeviceTexelBufferAlignmentFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 texelBufferAlignment = {};
};
static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentFeaturesEXT>
{
using Type = PhysicalDeviceTexelBufferAlignmentFeaturesEXT;
};
struct PhysicalDeviceTexelBufferAlignmentPropertiesEXT
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT(VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment_ = {}) VULKAN_HPP_NOEXCEPT
: storageTexelBufferOffsetAlignmentBytes( storageTexelBufferOffsetAlignmentBytes_ ), storageTexelBufferOffsetSingleTexelAlignment( storageTexelBufferOffsetSingleTexelAlignment_ ), uniformTexelBufferOffsetAlignmentBytes( uniformTexelBufferOffsetAlignmentBytes_ ), uniformTexelBufferOffsetSingleTexelAlignment( uniformTexelBufferOffsetSingleTexelAlignment_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceTexelBufferAlignmentPropertiesEXT( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTexelBufferAlignmentPropertiesEXT( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceTexelBufferAlignmentPropertiesEXT( *reinterpret_cast<PhysicalDeviceTexelBufferAlignmentPropertiesEXT const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTexelBufferAlignmentPropertiesEXT & operator=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTexelBufferAlignmentPropertiesEXT & operator=( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT*>( this );
}
operator VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& ) const = default;
#else
bool operator==( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( storageTexelBufferOffsetAlignmentBytes == rhs.storageTexelBufferOffsetAlignmentBytes )
&& ( storageTexelBufferOffsetSingleTexelAlignment == rhs.storageTexelBufferOffsetSingleTexelAlignment )
&& ( uniformTexelBufferOffsetAlignmentBytes == rhs.uniformTexelBufferOffsetAlignmentBytes )
&& ( uniformTexelBufferOffsetSingleTexelAlignment == rhs.uniformTexelBufferOffsetSingleTexelAlignment );
}
bool operator!=( PhysicalDeviceTexelBufferAlignmentPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::DeviceSize storageTexelBufferOffsetAlignmentBytes = {};
VULKAN_HPP_NAMESPACE::Bool32 storageTexelBufferOffsetSingleTexelAlignment = {};
VULKAN_HPP_NAMESPACE::DeviceSize uniformTexelBufferOffsetAlignmentBytes = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformTexelBufferOffsetSingleTexelAlignment = {};
};
static_assert( sizeof( PhysicalDeviceTexelBufferAlignmentPropertiesEXT ) == sizeof( VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceTexelBufferAlignmentPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTexelBufferAlignmentPropertiesEXT>
{
using Type = PhysicalDeviceTexelBufferAlignmentPropertiesEXT;
};
struct PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ = {}) VULKAN_HPP_NOEXCEPT
: textureCompressionASTC_HDR( textureCompressionASTC_HDR_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT( *reinterpret_cast<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & operator=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & operator=( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const *>( &rhs );
return *this;
}
PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT & setTextureCompressionASTC_HDR( VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR_ ) VULKAN_HPP_NOEXCEPT
{
textureCompressionASTC_HDR = textureCompressionASTC_HDR_;
return *this;
}
operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT*>( this );
}
operator VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& ) const = default;
#else
bool operator==( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( textureCompressionASTC_HDR == rhs.textureCompressionASTC_HDR );
}
bool operator!=( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 textureCompressionASTC_HDR = {};
};
static_assert( sizeof( PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ) == sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT>
{
using Type = PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT;
};
struct PhysicalDeviceTimelineSemaphoreFeatures
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures(VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}) VULKAN_HPP_NOEXCEPT
: timelineSemaphore( timelineSemaphore_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreFeatures( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTimelineSemaphoreFeatures( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceTimelineSemaphoreFeatures( *reinterpret_cast<PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreFeatures & operator=( PhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTimelineSemaphoreFeatures & operator=( VkPhysicalDeviceTimelineSemaphoreFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures const *>( &rhs );
return *this;
}
PhysicalDeviceTimelineSemaphoreFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceTimelineSemaphoreFeatures & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
{
timelineSemaphore = timelineSemaphore_;
return *this;
}
operator VkPhysicalDeviceTimelineSemaphoreFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreFeatures*>( this );
}
operator VkPhysicalDeviceTimelineSemaphoreFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreFeatures*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceTimelineSemaphoreFeatures const& ) const = default;
#else
bool operator==( PhysicalDeviceTimelineSemaphoreFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( timelineSemaphore == rhs.timelineSemaphore );
}
bool operator!=( PhysicalDeviceTimelineSemaphoreFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreFeatures;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {};
};
static_assert( sizeof( PhysicalDeviceTimelineSemaphoreFeatures ) == sizeof( VkPhysicalDeviceTimelineSemaphoreFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceTimelineSemaphoreFeatures>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreFeatures>
{
using Type = PhysicalDeviceTimelineSemaphoreFeatures;
};
using PhysicalDeviceTimelineSemaphoreFeaturesKHR = PhysicalDeviceTimelineSemaphoreFeatures;
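// NOTE (illustrative sketch, not part of the generated API): feature structs are also how
// features get enabled at device creation time, by chaining them into vk::DeviceCreateInfo.
// A sketch, assuming `gpu` is a vk::PhysicalDevice and `queueInfo` a filled
// vk::DeviceQueueCreateInfo:
//
//   vk::PhysicalDeviceTimelineSemaphoreFeatures timelineFeatures;
//   timelineFeatures.setTimelineSemaphore( VK_TRUE );
//   vk::DeviceCreateInfo deviceInfo;
//   deviceInfo.setQueueCreateInfoCount( 1 ).setPQueueCreateInfos( &queueInfo );
//   deviceInfo.setPNext( &timelineFeatures );   // request the feature on the new device
//   vk::Device device = gpu.createDevice( deviceInfo );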
  56173. struct PhysicalDeviceTimelineSemaphoreProperties
  56174. {
  56175. static const bool allowDuplicate = false;
  56176. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
  56177. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  56178. VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties(uint64_t maxTimelineSemaphoreValueDifference_ = {}) VULKAN_HPP_NOEXCEPT
  56179. : maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ )
  56180. {}
  56181. VULKAN_HPP_CONSTEXPR PhysicalDeviceTimelineSemaphoreProperties( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  56182. PhysicalDeviceTimelineSemaphoreProperties( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  56183. : PhysicalDeviceTimelineSemaphoreProperties( *reinterpret_cast<PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs ) )
  56184. {}
  56185. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  56186. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTimelineSemaphoreProperties & operator=( PhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  56187. PhysicalDeviceTimelineSemaphoreProperties & operator=( VkPhysicalDeviceTimelineSemaphoreProperties const & rhs ) VULKAN_HPP_NOEXCEPT
  56188. {
  56189. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties const *>( &rhs );
  56190. return *this;
  56191. }
  56192. operator VkPhysicalDeviceTimelineSemaphoreProperties const&() const VULKAN_HPP_NOEXCEPT
  56193. {
  56194. return *reinterpret_cast<const VkPhysicalDeviceTimelineSemaphoreProperties*>( this );
  56195. }
  56196. operator VkPhysicalDeviceTimelineSemaphoreProperties &() VULKAN_HPP_NOEXCEPT
  56197. {
  56198. return *reinterpret_cast<VkPhysicalDeviceTimelineSemaphoreProperties*>( this );
  56199. }
  56200. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  56201. auto operator<=>( PhysicalDeviceTimelineSemaphoreProperties const& ) const = default;
  56202. #else
  56203. bool operator==( PhysicalDeviceTimelineSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  56204. {
  56205. return ( sType == rhs.sType )
  56206. && ( pNext == rhs.pNext )
  56207. && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference );
  56208. }
  56209. bool operator!=( PhysicalDeviceTimelineSemaphoreProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
  56210. {
  56211. return !operator==( rhs );
  56212. }
  56213. #endif
  56214. public:
  56215. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTimelineSemaphoreProperties;
  56216. void* pNext = {};
  56217. uint64_t maxTimelineSemaphoreValueDifference = {};
  56218. };
  56219. static_assert( sizeof( PhysicalDeviceTimelineSemaphoreProperties ) == sizeof( VkPhysicalDeviceTimelineSemaphoreProperties ), "struct and wrapper have different size!" );
  56220. static_assert( std::is_standard_layout<PhysicalDeviceTimelineSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
  56221. template <>
  56222. struct CppType<StructureType, StructureType::ePhysicalDeviceTimelineSemaphoreProperties>
  56223. {
  56224. using Type = PhysicalDeviceTimelineSemaphoreProperties;
  56225. };
  56226. using PhysicalDeviceTimelineSemaphorePropertiesKHR = PhysicalDeviceTimelineSemaphoreProperties;
  56227. struct PhysicalDeviceTransformFeedbackFeaturesEXT
  56228. {
  56229. static const bool allowDuplicate = false;
  56230. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
  56231. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  56232. VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ = {}, VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ = {}) VULKAN_HPP_NOEXCEPT
  56233. : transformFeedback( transformFeedback_ ), geometryStreams( geometryStreams_ )
  56234. {}
  56235. VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackFeaturesEXT( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  56236. PhysicalDeviceTransformFeedbackFeaturesEXT( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  56237. : PhysicalDeviceTransformFeedbackFeaturesEXT( *reinterpret_cast<PhysicalDeviceTransformFeedbackFeaturesEXT const *>( &rhs ) )
  56238. {}
  56239. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  56240. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( PhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  56241. PhysicalDeviceTransformFeedbackFeaturesEXT & operator=( VkPhysicalDeviceTransformFeedbackFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  56242. {
  56243. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT const *>( &rhs );
  56244. return *this;
  56245. }
  56246. PhysicalDeviceTransformFeedbackFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  56247. {
  56248. pNext = pNext_;
  56249. return *this;
  56250. }
  56251. PhysicalDeviceTransformFeedbackFeaturesEXT & setTransformFeedback( VULKAN_HPP_NAMESPACE::Bool32 transformFeedback_ ) VULKAN_HPP_NOEXCEPT
  56252. {
  56253. transformFeedback = transformFeedback_;
  56254. return *this;
  56255. }
  56256. PhysicalDeviceTransformFeedbackFeaturesEXT & setGeometryStreams( VULKAN_HPP_NAMESPACE::Bool32 geometryStreams_ ) VULKAN_HPP_NOEXCEPT
  56257. {
  56258. geometryStreams = geometryStreams_;
  56259. return *this;
  56260. }
  56261. operator VkPhysicalDeviceTransformFeedbackFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  56262. {
  56263. return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this );
  56264. }
  56265. operator VkPhysicalDeviceTransformFeedbackFeaturesEXT &() VULKAN_HPP_NOEXCEPT
  56266. {
  56267. return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackFeaturesEXT*>( this );
  56268. }
  56269. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  56270. auto operator<=>( PhysicalDeviceTransformFeedbackFeaturesEXT const& ) const = default;
  56271. #else
  56272. bool operator==( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  56273. {
  56274. return ( sType == rhs.sType )
  56275. && ( pNext == rhs.pNext )
  56276. && ( transformFeedback == rhs.transformFeedback )
  56277. && ( geometryStreams == rhs.geometryStreams );
  56278. }
  56279. bool operator!=( PhysicalDeviceTransformFeedbackFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  56280. {
  56281. return !operator==( rhs );
  56282. }
  56283. #endif
  56284. public:
  56285. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT;
  56286. void* pNext = {};
  56287. VULKAN_HPP_NAMESPACE::Bool32 transformFeedback = {};
  56288. VULKAN_HPP_NAMESPACE::Bool32 geometryStreams = {};
  56289. };
  56290. static_assert( sizeof( PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ), "struct and wrapper have different size!" );
  56291. static_assert( std::is_standard_layout<PhysicalDeviceTransformFeedbackFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  56292. template <>
  56293. struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackFeaturesEXT>
  56294. {
  56295. using Type = PhysicalDeviceTransformFeedbackFeaturesEXT;
  56296. };
struct PhysicalDeviceTransformFeedbackPropertiesEXT
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT(uint32_t maxTransformFeedbackStreams_ = {}, uint32_t maxTransformFeedbackBuffers_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize_ = {}, uint32_t maxTransformFeedbackStreamDataSize_ = {}, uint32_t maxTransformFeedbackBufferDataSize_ = {}, uint32_t maxTransformFeedbackBufferDataStride_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect_ = {}, VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw_ = {}) VULKAN_HPP_NOEXCEPT
: maxTransformFeedbackStreams( maxTransformFeedbackStreams_ ), maxTransformFeedbackBuffers( maxTransformFeedbackBuffers_ ), maxTransformFeedbackBufferSize( maxTransformFeedbackBufferSize_ ), maxTransformFeedbackStreamDataSize( maxTransformFeedbackStreamDataSize_ ), maxTransformFeedbackBufferDataSize( maxTransformFeedbackBufferDataSize_ ), maxTransformFeedbackBufferDataStride( maxTransformFeedbackBufferDataStride_ ), transformFeedbackQueries( transformFeedbackQueries_ ), transformFeedbackStreamsLinesTriangles( transformFeedbackStreamsLinesTriangles_ ), transformFeedbackRasterizationStreamSelect( transformFeedbackRasterizationStreamSelect_ ), transformFeedbackDraw( transformFeedbackDraw_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceTransformFeedbackPropertiesEXT( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTransformFeedbackPropertiesEXT( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceTransformFeedbackPropertiesEXT( *reinterpret_cast<PhysicalDeviceTransformFeedbackPropertiesEXT const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( PhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceTransformFeedbackPropertiesEXT & operator=( VkPhysicalDeviceTransformFeedbackPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceTransformFeedbackPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
}
operator VkPhysicalDeviceTransformFeedbackPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceTransformFeedbackPropertiesEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceTransformFeedbackPropertiesEXT const& ) const = default;
#else
bool operator==( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxTransformFeedbackStreams == rhs.maxTransformFeedbackStreams )
&& ( maxTransformFeedbackBuffers == rhs.maxTransformFeedbackBuffers )
&& ( maxTransformFeedbackBufferSize == rhs.maxTransformFeedbackBufferSize )
&& ( maxTransformFeedbackStreamDataSize == rhs.maxTransformFeedbackStreamDataSize )
&& ( maxTransformFeedbackBufferDataSize == rhs.maxTransformFeedbackBufferDataSize )
&& ( maxTransformFeedbackBufferDataStride == rhs.maxTransformFeedbackBufferDataStride )
&& ( transformFeedbackQueries == rhs.transformFeedbackQueries )
&& ( transformFeedbackStreamsLinesTriangles == rhs.transformFeedbackStreamsLinesTriangles )
&& ( transformFeedbackRasterizationStreamSelect == rhs.transformFeedbackRasterizationStreamSelect )
&& ( transformFeedbackDraw == rhs.transformFeedbackDraw );
}
bool operator!=( PhysicalDeviceTransformFeedbackPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT;
void* pNext = {};
uint32_t maxTransformFeedbackStreams = {};
uint32_t maxTransformFeedbackBuffers = {};
VULKAN_HPP_NAMESPACE::DeviceSize maxTransformFeedbackBufferSize = {};
uint32_t maxTransformFeedbackStreamDataSize = {};
uint32_t maxTransformFeedbackBufferDataSize = {};
uint32_t maxTransformFeedbackBufferDataStride = {};
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackQueries = {};
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackStreamsLinesTriangles = {};
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackRasterizationStreamSelect = {};
VULKAN_HPP_NAMESPACE::Bool32 transformFeedbackDraw = {};
};
static_assert( sizeof( PhysicalDeviceTransformFeedbackPropertiesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceTransformFeedbackPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceTransformFeedbackPropertiesEXT>
{
using Type = PhysicalDeviceTransformFeedbackPropertiesEXT;
};
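// Usage sketch (illustrative only): reading the transform feedback limits reported by a
// device. Assumes a valid vk::PhysicalDevice named `physicalDevice`; the variable names
// are placeholders, not part of this header.
//
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceTransformFeedbackPropertiesEXT>();
//   auto props = chain.get<vk::PhysicalDeviceTransformFeedbackPropertiesEXT>();
//   uint32_t streams = props.maxTransformFeedbackStreams;   // e.g. clamp stream indices to this limit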
struct PhysicalDeviceUniformBufferStandardLayoutFeatures
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures(VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}) VULKAN_HPP_NOEXCEPT
: uniformBufferStandardLayout( uniformBufferStandardLayout_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceUniformBufferStandardLayoutFeatures( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceUniformBufferStandardLayoutFeatures( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceUniformBufferStandardLayoutFeatures( *reinterpret_cast<PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( PhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceUniformBufferStandardLayoutFeatures & operator=( VkPhysicalDeviceUniformBufferStandardLayoutFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures const *>( &rhs );
return *this;
}
PhysicalDeviceUniformBufferStandardLayoutFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceUniformBufferStandardLayoutFeatures & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
{
uniformBufferStandardLayout = uniformBufferStandardLayout_;
return *this;
}
operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceUniformBufferStandardLayoutFeatures*>( this );
}
operator VkPhysicalDeviceUniformBufferStandardLayoutFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceUniformBufferStandardLayoutFeatures*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceUniformBufferStandardLayoutFeatures const& ) const = default;
#else
bool operator==( PhysicalDeviceUniformBufferStandardLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout );
}
bool operator!=( PhysicalDeviceUniformBufferStandardLayoutFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {};
};
static_assert( sizeof( PhysicalDeviceUniformBufferStandardLayoutFeatures ) == sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceUniformBufferStandardLayoutFeatures>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceUniformBufferStandardLayoutFeatures>
{
using Type = PhysicalDeviceUniformBufferStandardLayoutFeatures;
};
using PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = PhysicalDeviceUniformBufferStandardLayoutFeatures;
struct PhysicalDeviceVariablePointersFeatures
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVariablePointersFeatures;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures(VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}) VULKAN_HPP_NOEXCEPT
: variablePointersStorageBuffer( variablePointersStorageBuffer_ ), variablePointers( variablePointers_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVariablePointersFeatures( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVariablePointersFeatures( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVariablePointersFeatures( *reinterpret_cast<PhysicalDeviceVariablePointersFeatures const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVariablePointersFeatures & operator=( PhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVariablePointersFeatures & operator=( VkPhysicalDeviceVariablePointersFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures const *>( &rhs );
return *this;
}
PhysicalDeviceVariablePointersFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceVariablePointersFeatures & setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
{
variablePointersStorageBuffer = variablePointersStorageBuffer_;
return *this;
}
PhysicalDeviceVariablePointersFeatures & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
{
variablePointers = variablePointers_;
return *this;
}
operator VkPhysicalDeviceVariablePointersFeatures const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVariablePointersFeatures*>( this );
}
operator VkPhysicalDeviceVariablePointersFeatures &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVariablePointersFeatures*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVariablePointersFeatures const& ) const = default;
#else
bool operator==( PhysicalDeviceVariablePointersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer )
&& ( variablePointers == rhs.variablePointers );
}
bool operator!=( PhysicalDeviceVariablePointersFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVariablePointersFeatures;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {};
VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {};
};
static_assert( sizeof( PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceVariablePointersFeatures>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVariablePointersFeatures>
{
using Type = PhysicalDeviceVariablePointersFeatures;
};
using PhysicalDeviceVariablePointerFeatures = PhysicalDeviceVariablePointersFeatures;
using PhysicalDeviceVariablePointerFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
using PhysicalDeviceVariablePointersFeaturesKHR = PhysicalDeviceVariablePointersFeatures;
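// Usage sketch (illustrative only): several feature structs can be queried in one call by
// listing them in a single vk::StructureChain; vulkan.hpp links their pNext members for
// you. Assumes a valid vk::PhysicalDevice named `physicalDevice`; the names below are
// placeholders, not part of this header.
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDeviceVariablePointersFeatures,
//                                            vk::PhysicalDeviceUniformBufferStandardLayoutFeatures>();
//   bool supported = chain.get<vk::PhysicalDeviceVariablePointersFeatures>().variablePointers
//                 && chain.get<vk::PhysicalDeviceUniformBufferStandardLayoutFeatures>().uniformBufferStandardLayout;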
struct PhysicalDeviceVertexAttributeDivisorFeaturesEXT
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ = {}) VULKAN_HPP_NOEXCEPT
: vertexAttributeInstanceRateDivisor( vertexAttributeInstanceRateDivisor_ ), vertexAttributeInstanceRateZeroDivisor( vertexAttributeInstanceRateZeroDivisor_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorFeaturesEXT( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVertexAttributeDivisorFeaturesEXT( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVertexAttributeDivisorFeaturesEXT( *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVertexAttributeDivisorFeaturesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT const *>( &rhs );
return *this;
}
PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor_ ) VULKAN_HPP_NOEXCEPT
{
vertexAttributeInstanceRateDivisor = vertexAttributeInstanceRateDivisor_;
return *this;
}
PhysicalDeviceVertexAttributeDivisorFeaturesEXT & setVertexAttributeInstanceRateZeroDivisor( VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor_ ) VULKAN_HPP_NOEXCEPT
{
vertexAttributeInstanceRateZeroDivisor = vertexAttributeInstanceRateZeroDivisor_;
return *this;
}
operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT*>( this );
}
operator VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& ) const = default;
#else
bool operator==( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( vertexAttributeInstanceRateDivisor == rhs.vertexAttributeInstanceRateDivisor )
&& ( vertexAttributeInstanceRateZeroDivisor == rhs.vertexAttributeInstanceRateZeroDivisor );
}
bool operator!=( PhysicalDeviceVertexAttributeDivisorFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateDivisor = {};
VULKAN_HPP_NAMESPACE::Bool32 vertexAttributeInstanceRateZeroDivisor = {};
};
static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceVertexAttributeDivisorFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorFeaturesEXT>
{
using Type = PhysicalDeviceVertexAttributeDivisorFeaturesEXT;
};
struct PhysicalDeviceVertexAttributeDivisorPropertiesEXT
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT(uint32_t maxVertexAttribDivisor_ = {}) VULKAN_HPP_NOEXCEPT
: maxVertexAttribDivisor( maxVertexAttribDivisor_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVertexAttributeDivisorPropertiesEXT( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVertexAttributeDivisorPropertiesEXT( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVertexAttributeDivisorPropertiesEXT( *reinterpret_cast<PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVertexAttributeDivisorPropertiesEXT & operator=( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
}
operator VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& ) const = default;
#else
bool operator==( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( maxVertexAttribDivisor == rhs.maxVertexAttribDivisor );
}
bool operator!=( PhysicalDeviceVertexAttributeDivisorPropertiesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT;
void* pNext = {};
uint32_t maxVertexAttribDivisor = {};
};
static_assert( sizeof( PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) == sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVertexAttributeDivisorPropertiesEXT>
{
using Type = PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
};
struct PhysicalDeviceVulkan11Features
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Features;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features(VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiview_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ = {}, VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ = {}) VULKAN_HPP_NOEXCEPT
: storageBuffer16BitAccess( storageBuffer16BitAccess_ ), uniformAndStorageBuffer16BitAccess( uniformAndStorageBuffer16BitAccess_ ), storagePushConstant16( storagePushConstant16_ ), storageInputOutput16( storageInputOutput16_ ), multiview( multiview_ ), multiviewGeometryShader( multiviewGeometryShader_ ), multiviewTessellationShader( multiviewTessellationShader_ ), variablePointersStorageBuffer( variablePointersStorageBuffer_ ), variablePointers( variablePointers_ ), protectedMemory( protectedMemory_ ), samplerYcbcrConversion( samplerYcbcrConversion_ ), shaderDrawParameters( shaderDrawParameters_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan11Features( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan11Features( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVulkan11Features( *reinterpret_cast<PhysicalDeviceVulkan11Features const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Features & operator=( PhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan11Features & operator=( VkPhysicalDeviceVulkan11Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features const *>( &rhs );
return *this;
}
PhysicalDeviceVulkan11Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceVulkan11Features & setStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
storageBuffer16BitAccess = storageBuffer16BitAccess_;
return *this;
}
PhysicalDeviceVulkan11Features & setUniformAndStorageBuffer16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
uniformAndStorageBuffer16BitAccess = uniformAndStorageBuffer16BitAccess_;
return *this;
}
PhysicalDeviceVulkan11Features & setStoragePushConstant16( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16_ ) VULKAN_HPP_NOEXCEPT
{
storagePushConstant16 = storagePushConstant16_;
return *this;
}
PhysicalDeviceVulkan11Features & setStorageInputOutput16( VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16_ ) VULKAN_HPP_NOEXCEPT
{
storageInputOutput16 = storageInputOutput16_;
return *this;
}
PhysicalDeviceVulkan11Features & setMultiview( VULKAN_HPP_NAMESPACE::Bool32 multiview_ ) VULKAN_HPP_NOEXCEPT
{
multiview = multiview_;
return *this;
}
PhysicalDeviceVulkan11Features & setMultiviewGeometryShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader_ ) VULKAN_HPP_NOEXCEPT
{
multiviewGeometryShader = multiviewGeometryShader_;
return *this;
}
PhysicalDeviceVulkan11Features & setMultiviewTessellationShader( VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader_ ) VULKAN_HPP_NOEXCEPT
{
multiviewTessellationShader = multiviewTessellationShader_;
return *this;
}
PhysicalDeviceVulkan11Features & setVariablePointersStorageBuffer( VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer_ ) VULKAN_HPP_NOEXCEPT
{
variablePointersStorageBuffer = variablePointersStorageBuffer_;
return *this;
}
PhysicalDeviceVulkan11Features & setVariablePointers( VULKAN_HPP_NAMESPACE::Bool32 variablePointers_ ) VULKAN_HPP_NOEXCEPT
{
variablePointers = variablePointers_;
return *this;
}
PhysicalDeviceVulkan11Features & setProtectedMemory( VULKAN_HPP_NAMESPACE::Bool32 protectedMemory_ ) VULKAN_HPP_NOEXCEPT
{
protectedMemory = protectedMemory_;
return *this;
}
PhysicalDeviceVulkan11Features & setSamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion_ ) VULKAN_HPP_NOEXCEPT
{
samplerYcbcrConversion = samplerYcbcrConversion_;
return *this;
}
PhysicalDeviceVulkan11Features & setShaderDrawParameters( VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters_ ) VULKAN_HPP_NOEXCEPT
{
shaderDrawParameters = shaderDrawParameters_;
return *this;
}
operator VkPhysicalDeviceVulkan11Features const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkan11Features*>( this );
}
operator VkPhysicalDeviceVulkan11Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVulkan11Features*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVulkan11Features const& ) const = default;
#else
bool operator==( PhysicalDeviceVulkan11Features const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( storageBuffer16BitAccess == rhs.storageBuffer16BitAccess )
&& ( uniformAndStorageBuffer16BitAccess == rhs.uniformAndStorageBuffer16BitAccess )
&& ( storagePushConstant16 == rhs.storagePushConstant16 )
&& ( storageInputOutput16 == rhs.storageInputOutput16 )
&& ( multiview == rhs.multiview )
&& ( multiviewGeometryShader == rhs.multiviewGeometryShader )
&& ( multiviewTessellationShader == rhs.multiviewTessellationShader )
&& ( variablePointersStorageBuffer == rhs.variablePointersStorageBuffer )
&& ( variablePointers == rhs.variablePointers )
&& ( protectedMemory == rhs.protectedMemory )
&& ( samplerYcbcrConversion == rhs.samplerYcbcrConversion )
&& ( shaderDrawParameters == rhs.shaderDrawParameters );
}
bool operator!=( PhysicalDeviceVulkan11Features const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Features;
void* pNext = {};
VULKAN_HPP_NAMESPACE::Bool32 storageBuffer16BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer16BitAccess = {};
VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant16 = {};
VULKAN_HPP_NAMESPACE::Bool32 storageInputOutput16 = {};
VULKAN_HPP_NAMESPACE::Bool32 multiview = {};
VULKAN_HPP_NAMESPACE::Bool32 multiviewGeometryShader = {};
VULKAN_HPP_NAMESPACE::Bool32 multiviewTessellationShader = {};
VULKAN_HPP_NAMESPACE::Bool32 variablePointersStorageBuffer = {};
VULKAN_HPP_NAMESPACE::Bool32 variablePointers = {};
VULKAN_HPP_NAMESPACE::Bool32 protectedMemory = {};
VULKAN_HPP_NAMESPACE::Bool32 samplerYcbcrConversion = {};
VULKAN_HPP_NAMESPACE::Bool32 shaderDrawParameters = {};
};
static_assert( sizeof( PhysicalDeviceVulkan11Features ) == sizeof( VkPhysicalDeviceVulkan11Features ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceVulkan11Features>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Features>
{
using Type = PhysicalDeviceVulkan11Features;
};
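// Usage sketch (illustrative only): the fluent set*() members let the struct be filled in
// one expression and chained into vk::DeviceCreateInfo::pNext when creating a device.
// Assumes a valid vk::PhysicalDevice named `physicalDevice` and a filled
// vk::DeviceQueueCreateInfo named `queueCreateInfo`; these names are placeholders.
//
//   auto vulkan11Features = vk::PhysicalDeviceVulkan11Features()
//                             .setMultiview( VK_TRUE )
//                             .setShaderDrawParameters( VK_TRUE );
//   vk::DeviceCreateInfo deviceCreateInfo( {}, 1, &queueCreateInfo );
//   deviceCreateInfo.setPNext( &vulkan11Features );
//   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );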
struct PhysicalDeviceVulkan11Properties
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan11Properties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties(std::array<uint8_t,VK_UUID_SIZE> const& deviceUUID_ = {}, std::array<uint8_t,VK_UUID_SIZE> const& driverUUID_ = {}, std::array<uint8_t,VK_LUID_SIZE> const& deviceLUID_ = {}, uint32_t deviceNodeMask_ = {}, VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid_ = {}, uint32_t subgroupSize_ = {}, VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages_ = {}, VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages_ = {}, VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior_ = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes, uint32_t maxMultiviewViewCount_ = {}, uint32_t maxMultiviewInstanceIndex_ = {}, VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault_ = {}, uint32_t maxPerSetDescriptors_ = {}, VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize_ = {}) VULKAN_HPP_NOEXCEPT
: deviceUUID( deviceUUID_ ), driverUUID( driverUUID_ ), deviceLUID( deviceLUID_ ), deviceNodeMask( deviceNodeMask_ ), deviceLUIDValid( deviceLUIDValid_ ), subgroupSize( subgroupSize_ ), subgroupSupportedStages( subgroupSupportedStages_ ), subgroupSupportedOperations( subgroupSupportedOperations_ ), subgroupQuadOperationsInAllStages( subgroupQuadOperationsInAllStages_ ), pointClippingBehavior( pointClippingBehavior_ ), maxMultiviewViewCount( maxMultiviewViewCount_ ), maxMultiviewInstanceIndex( maxMultiviewInstanceIndex_ ), protectedNoFault( protectedNoFault_ ), maxPerSetDescriptors( maxPerSetDescriptors_ ), maxMemoryAllocationSize( maxMemoryAllocationSize_ )
{}
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan11Properties( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVulkan11Properties( *reinterpret_cast<PhysicalDeviceVulkan11Properties const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan11Properties & operator=( PhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan11Properties & operator=( VkPhysicalDeviceVulkan11Properties const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties const *>( &rhs );
return *this;
}
operator VkPhysicalDeviceVulkan11Properties const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkan11Properties*>( this );
}
operator VkPhysicalDeviceVulkan11Properties &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVulkan11Properties*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVulkan11Properties const& ) const = default;
#else
bool operator==( PhysicalDeviceVulkan11Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( deviceUUID == rhs.deviceUUID )
&& ( driverUUID == rhs.driverUUID )
&& ( deviceLUID == rhs.deviceLUID )
&& ( deviceNodeMask == rhs.deviceNodeMask )
&& ( deviceLUIDValid == rhs.deviceLUIDValid )
&& ( subgroupSize == rhs.subgroupSize )
&& ( subgroupSupportedStages == rhs.subgroupSupportedStages )
&& ( subgroupSupportedOperations == rhs.subgroupSupportedOperations )
&& ( subgroupQuadOperationsInAllStages == rhs.subgroupQuadOperationsInAllStages )
&& ( pointClippingBehavior == rhs.pointClippingBehavior )
&& ( maxMultiviewViewCount == rhs.maxMultiviewViewCount )
&& ( maxMultiviewInstanceIndex == rhs.maxMultiviewInstanceIndex )
&& ( protectedNoFault == rhs.protectedNoFault )
&& ( maxPerSetDescriptors == rhs.maxPerSetDescriptors )
&& ( maxMemoryAllocationSize == rhs.maxMemoryAllocationSize );
}
bool operator!=( PhysicalDeviceVulkan11Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return !operator==( rhs );
}
#endif
public:
VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan11Properties;
void* pNext = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> deviceUUID = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_UUID_SIZE> driverUUID = {};
VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, VK_LUID_SIZE> deviceLUID = {};
uint32_t deviceNodeMask = {};
VULKAN_HPP_NAMESPACE::Bool32 deviceLUIDValid = {};
uint32_t subgroupSize = {};
VULKAN_HPP_NAMESPACE::ShaderStageFlags subgroupSupportedStages = {};
VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags subgroupSupportedOperations = {};
VULKAN_HPP_NAMESPACE::Bool32 subgroupQuadOperationsInAllStages = {};
VULKAN_HPP_NAMESPACE::PointClippingBehavior pointClippingBehavior = VULKAN_HPP_NAMESPACE::PointClippingBehavior::eAllClipPlanes;
uint32_t maxMultiviewViewCount = {};
uint32_t maxMultiviewInstanceIndex = {};
VULKAN_HPP_NAMESPACE::Bool32 protectedNoFault = {};
uint32_t maxPerSetDescriptors = {};
VULKAN_HPP_NAMESPACE::DeviceSize maxMemoryAllocationSize = {};
};
static_assert( sizeof( PhysicalDeviceVulkan11Properties ) == sizeof( VkPhysicalDeviceVulkan11Properties ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<PhysicalDeviceVulkan11Properties>::value, "struct wrapper is not a standard layout!" );
template <>
struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan11Properties>
{
using Type = PhysicalDeviceVulkan11Properties;
};
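// Usage sketch (illustrative only): on a Vulkan 1.2 device the aggregated 1.1 properties
// can be read through the same structure-chain pattern. Assumes a valid vk::PhysicalDevice
// named `physicalDevice`; the names are placeholders.
//
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceVulkan11Properties>();
//   auto props = chain.get<vk::PhysicalDeviceVulkan11Properties>();
//   uint32_t subgroupSize = props.subgroupSize;   // e.g. used when sizing compute workgroups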
struct PhysicalDeviceVulkan12Features
{
static const bool allowDuplicate = false;
static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Features;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features(VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ = {}, VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ = {}, VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ = {}, VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ = {}, VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ = {}, VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ = {}, VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ = {}, VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ = {}, VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ = {}, VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ = {}) VULKAN_HPP_NOEXCEPT
: samplerMirrorClampToEdge( samplerMirrorClampToEdge_ ), drawIndirectCount( drawIndirectCount_ ), storageBuffer8BitAccess( storageBuffer8BitAccess_ ), uniformAndStorageBuffer8BitAccess( uniformAndStorageBuffer8BitAccess_ ), storagePushConstant8( storagePushConstant8_ ), shaderBufferInt64Atomics( shaderBufferInt64Atomics_ ), shaderSharedInt64Atomics( shaderSharedInt64Atomics_ ), shaderFloat16( shaderFloat16_ ), shaderInt8( shaderInt8_ ), descriptorIndexing( descriptorIndexing_ ), shaderInputAttachmentArrayDynamicIndexing( shaderInputAttachmentArrayDynamicIndexing_ ), shaderUniformTexelBufferArrayDynamicIndexing( shaderUniformTexelBufferArrayDynamicIndexing_ ), shaderStorageTexelBufferArrayDynamicIndexing( shaderStorageTexelBufferArrayDynamicIndexing_ ), shaderUniformBufferArrayNonUniformIndexing( shaderUniformBufferArrayNonUniformIndexing_ ), shaderSampledImageArrayNonUniformIndexing( shaderSampledImageArrayNonUniformIndexing_ ), shaderStorageBufferArrayNonUniformIndexing( shaderStorageBufferArrayNonUniformIndexing_ ), shaderStorageImageArrayNonUniformIndexing( shaderStorageImageArrayNonUniformIndexing_ ), shaderInputAttachmentArrayNonUniformIndexing( shaderInputAttachmentArrayNonUniformIndexing_ ), shaderUniformTexelBufferArrayNonUniformIndexing( shaderUniformTexelBufferArrayNonUniformIndexing_ ), shaderStorageTexelBufferArrayNonUniformIndexing( shaderStorageTexelBufferArrayNonUniformIndexing_ ), descriptorBindingUniformBufferUpdateAfterBind( descriptorBindingUniformBufferUpdateAfterBind_ ), descriptorBindingSampledImageUpdateAfterBind( descriptorBindingSampledImageUpdateAfterBind_ ), descriptorBindingStorageImageUpdateAfterBind( descriptorBindingStorageImageUpdateAfterBind_ ), descriptorBindingStorageBufferUpdateAfterBind( descriptorBindingStorageBufferUpdateAfterBind_ ), descriptorBindingUniformTexelBufferUpdateAfterBind( descriptorBindingUniformTexelBufferUpdateAfterBind_ ), descriptorBindingStorageTexelBufferUpdateAfterBind( descriptorBindingStorageTexelBufferUpdateAfterBind_ ), descriptorBindingUpdateUnusedWhilePending( descriptorBindingUpdateUnusedWhilePending_ ), descriptorBindingPartiallyBound( descriptorBindingPartiallyBound_ ), descriptorBindingVariableDescriptorCount( descriptorBindingVariableDescriptorCount_ ), runtimeDescriptorArray( runtimeDescriptorArray_ ), samplerFilterMinmax( samplerFilterMinmax_ ), scalarBlockLayout( scalarBlockLayout_ ), imagelessFramebuffer( imagelessFramebuffer_ ), uniformBufferStandardLayout( uniformBufferStandardLayout_ ), shaderSubgroupExtendedTypes( shaderSubgroupExtendedTypes_ ), separateDepthStencilLayouts( separateDepthStencilLayouts_ ), hostQueryReset( hostQueryReset_ ), timelineSemaphore( timelineSemaphore_ ), bufferDeviceAddress( bufferDeviceAddress_ ), bufferDeviceAddressCaptureReplay( bufferDeviceAddressCaptureReplay_ ), bufferDeviceAddressMultiDevice( bufferDeviceAddressMultiDevice_ ), vulkanMemoryModel( vulkanMemoryModel_ ), vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ), vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ ), shaderOutputViewportIndex( shaderOutputViewportIndex_ ), shaderOutputLayer( shaderOutputLayer_ ), subgroupBroadcastDynamicId( subgroupBroadcastDynamicId_ )
{}
VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkan12Features( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan12Features( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
: PhysicalDeviceVulkan12Features( *reinterpret_cast<PhysicalDeviceVulkan12Features const *>( &rhs ) )
{}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Features & operator=( PhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT = default;
PhysicalDeviceVulkan12Features & operator=( VkPhysicalDeviceVulkan12Features const & rhs ) VULKAN_HPP_NOEXCEPT
{
*this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features const *>( &rhs );
return *this;
}
PhysicalDeviceVulkan12Features & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
{
pNext = pNext_;
return *this;
}
PhysicalDeviceVulkan12Features & setSamplerMirrorClampToEdge( VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge_ ) VULKAN_HPP_NOEXCEPT
{
samplerMirrorClampToEdge = samplerMirrorClampToEdge_;
return *this;
}
PhysicalDeviceVulkan12Features & setDrawIndirectCount( VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount_ ) VULKAN_HPP_NOEXCEPT
{
drawIndirectCount = drawIndirectCount_;
return *this;
}
PhysicalDeviceVulkan12Features & setStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
storageBuffer8BitAccess = storageBuffer8BitAccess_;
return *this;
}
PhysicalDeviceVulkan12Features & setUniformAndStorageBuffer8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess_ ) VULKAN_HPP_NOEXCEPT
{
uniformAndStorageBuffer8BitAccess = uniformAndStorageBuffer8BitAccess_;
return *this;
}
PhysicalDeviceVulkan12Features & setStoragePushConstant8( VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8_ ) VULKAN_HPP_NOEXCEPT
{
storagePushConstant8 = storagePushConstant8_;
return *this;
}
PhysicalDeviceVulkan12Features & setShaderBufferInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderBufferInt64Atomics = shaderBufferInt64Atomics_;
return *this;
}
PhysicalDeviceVulkan12Features & setShaderSharedInt64Atomics( VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics_ ) VULKAN_HPP_NOEXCEPT
{
shaderSharedInt64Atomics = shaderSharedInt64Atomics_;
return *this;
}
PhysicalDeviceVulkan12Features & setShaderFloat16( VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16_ ) VULKAN_HPP_NOEXCEPT
{
shaderFloat16 = shaderFloat16_;
return *this;
}
PhysicalDeviceVulkan12Features & setShaderInt8( VULKAN_HPP_NAMESPACE::Bool32 shaderInt8_ ) VULKAN_HPP_NOEXCEPT
{
shaderInt8 = shaderInt8_;
return *this;
}
PhysicalDeviceVulkan12Features & setDescriptorIndexing( VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing_ ) VULKAN_HPP_NOEXCEPT
{
descriptorIndexing = descriptorIndexing_;
return *this;
}
PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderInputAttachmentArrayDynamicIndexing = shaderInputAttachmentArrayDynamicIndexing_;
return *this;
}
PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformTexelBufferArrayDynamicIndexing = shaderUniformTexelBufferArrayDynamicIndexing_;
return *this;
}
PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayDynamicIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageTexelBufferArrayDynamicIndexing = shaderStorageTexelBufferArrayDynamicIndexing_;
return *this;
}
PhysicalDeviceVulkan12Features & setShaderUniformBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformBufferArrayNonUniformIndexing = shaderUniformBufferArrayNonUniformIndexing_;
return *this;
}
PhysicalDeviceVulkan12Features & setShaderSampledImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderSampledImageArrayNonUniformIndexing = shaderSampledImageArrayNonUniformIndexing_;
return *this;
}
PhysicalDeviceVulkan12Features & setShaderStorageBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageBufferArrayNonUniformIndexing = shaderStorageBufferArrayNonUniformIndexing_;
return *this;
}
PhysicalDeviceVulkan12Features & setShaderStorageImageArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageImageArrayNonUniformIndexing = shaderStorageImageArrayNonUniformIndexing_;
return *this;
}
PhysicalDeviceVulkan12Features & setShaderInputAttachmentArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderInputAttachmentArrayNonUniformIndexing = shaderInputAttachmentArrayNonUniformIndexing_;
return *this;
}
PhysicalDeviceVulkan12Features & setShaderUniformTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderUniformTexelBufferArrayNonUniformIndexing = shaderUniformTexelBufferArrayNonUniformIndexing_;
return *this;
}
PhysicalDeviceVulkan12Features & setShaderStorageTexelBufferArrayNonUniformIndexing( VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing_ ) VULKAN_HPP_NOEXCEPT
{
shaderStorageTexelBufferArrayNonUniformIndexing = shaderStorageTexelBufferArrayNonUniformIndexing_;
return *this;
}
PhysicalDeviceVulkan12Features & setDescriptorBindingUniformBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingUniformBufferUpdateAfterBind = descriptorBindingUniformBufferUpdateAfterBind_;
return *this;
}
PhysicalDeviceVulkan12Features & setDescriptorBindingSampledImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingSampledImageUpdateAfterBind = descriptorBindingSampledImageUpdateAfterBind_;
return *this;
}
PhysicalDeviceVulkan12Features & setDescriptorBindingStorageImageUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingStorageImageUpdateAfterBind = descriptorBindingStorageImageUpdateAfterBind_;
return *this;
}
PhysicalDeviceVulkan12Features & setDescriptorBindingStorageBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingStorageBufferUpdateAfterBind = descriptorBindingStorageBufferUpdateAfterBind_;
return *this;
}
PhysicalDeviceVulkan12Features & setDescriptorBindingUniformTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingUniformTexelBufferUpdateAfterBind = descriptorBindingUniformTexelBufferUpdateAfterBind_;
return *this;
}
PhysicalDeviceVulkan12Features & setDescriptorBindingStorageTexelBufferUpdateAfterBind( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingStorageTexelBufferUpdateAfterBind = descriptorBindingStorageTexelBufferUpdateAfterBind_;
return *this;
}
PhysicalDeviceVulkan12Features & setDescriptorBindingUpdateUnusedWhilePending( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingUpdateUnusedWhilePending = descriptorBindingUpdateUnusedWhilePending_;
return *this;
}
PhysicalDeviceVulkan12Features & setDescriptorBindingPartiallyBound( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingPartiallyBound = descriptorBindingPartiallyBound_;
return *this;
}
PhysicalDeviceVulkan12Features & setDescriptorBindingVariableDescriptorCount( VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount_ ) VULKAN_HPP_NOEXCEPT
{
descriptorBindingVariableDescriptorCount = descriptorBindingVariableDescriptorCount_;
return *this;
}
PhysicalDeviceVulkan12Features & setRuntimeDescriptorArray( VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray_ ) VULKAN_HPP_NOEXCEPT
{
runtimeDescriptorArray = runtimeDescriptorArray_;
return *this;
}
PhysicalDeviceVulkan12Features & setSamplerFilterMinmax( VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax_ ) VULKAN_HPP_NOEXCEPT
{
samplerFilterMinmax = samplerFilterMinmax_;
return *this;
}
PhysicalDeviceVulkan12Features & setScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
{
scalarBlockLayout = scalarBlockLayout_;
return *this;
}
PhysicalDeviceVulkan12Features & setImagelessFramebuffer( VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer_ ) VULKAN_HPP_NOEXCEPT
{
imagelessFramebuffer = imagelessFramebuffer_;
return *this;
}
PhysicalDeviceVulkan12Features & setUniformBufferStandardLayout( VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout_ ) VULKAN_HPP_NOEXCEPT
{
uniformBufferStandardLayout = uniformBufferStandardLayout_;
return *this;
}
PhysicalDeviceVulkan12Features & setShaderSubgroupExtendedTypes( VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes_ ) VULKAN_HPP_NOEXCEPT
{
shaderSubgroupExtendedTypes = shaderSubgroupExtendedTypes_;
return *this;
}
PhysicalDeviceVulkan12Features & setSeparateDepthStencilLayouts( VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts_ ) VULKAN_HPP_NOEXCEPT
{
separateDepthStencilLayouts = separateDepthStencilLayouts_;
return *this;
}
PhysicalDeviceVulkan12Features & setHostQueryReset( VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset_ ) VULKAN_HPP_NOEXCEPT
{
hostQueryReset = hostQueryReset_;
return *this;
}
PhysicalDeviceVulkan12Features & setTimelineSemaphore( VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore_ ) VULKAN_HPP_NOEXCEPT
{
timelineSemaphore = timelineSemaphore_;
return *this;
}
PhysicalDeviceVulkan12Features & setBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddress = bufferDeviceAddress_;
return *this;
}
PhysicalDeviceVulkan12Features & setBufferDeviceAddressCaptureReplay( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddressCaptureReplay = bufferDeviceAddressCaptureReplay_;
return *this;
}
PhysicalDeviceVulkan12Features & setBufferDeviceAddressMultiDevice( VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice_ ) VULKAN_HPP_NOEXCEPT
{
bufferDeviceAddressMultiDevice = bufferDeviceAddressMultiDevice_;
return *this;
}
PhysicalDeviceVulkan12Features & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
{
vulkanMemoryModel = vulkanMemoryModel_;
return *this;
}
PhysicalDeviceVulkan12Features & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
{
vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
return *this;
}
PhysicalDeviceVulkan12Features & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
{
vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
return *this;
}
PhysicalDeviceVulkan12Features & setShaderOutputViewportIndex( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex_ ) VULKAN_HPP_NOEXCEPT
{
shaderOutputViewportIndex = shaderOutputViewportIndex_;
return *this;
}
PhysicalDeviceVulkan12Features & setShaderOutputLayer( VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer_ ) VULKAN_HPP_NOEXCEPT
{
shaderOutputLayer = shaderOutputLayer_;
return *this;
}
PhysicalDeviceVulkan12Features & setSubgroupBroadcastDynamicId( VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId_ ) VULKAN_HPP_NOEXCEPT
{
subgroupBroadcastDynamicId = subgroupBroadcastDynamicId_;
return *this;
}
operator VkPhysicalDeviceVulkan12Features const&() const VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<const VkPhysicalDeviceVulkan12Features*>( this );
}
operator VkPhysicalDeviceVulkan12Features &() VULKAN_HPP_NOEXCEPT
{
return *reinterpret_cast<VkPhysicalDeviceVulkan12Features*>( this );
}
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>( PhysicalDeviceVulkan12Features const& ) const = default;
#else
bool operator==( PhysicalDeviceVulkan12Features const& rhs ) const VULKAN_HPP_NOEXCEPT
{
return ( sType == rhs.sType )
&& ( pNext == rhs.pNext )
&& ( samplerMirrorClampToEdge == rhs.samplerMirrorClampToEdge )
&& ( drawIndirectCount == rhs.drawIndirectCount )
&& ( storageBuffer8BitAccess == rhs.storageBuffer8BitAccess )
&& ( uniformAndStorageBuffer8BitAccess == rhs.uniformAndStorageBuffer8BitAccess )
&& ( storagePushConstant8 == rhs.storagePushConstant8 )
&& ( shaderBufferInt64Atomics == rhs.shaderBufferInt64Atomics )
&& ( shaderSharedInt64Atomics == rhs.shaderSharedInt64Atomics )
&& ( shaderFloat16 == rhs.shaderFloat16 )
&& ( shaderInt8 == rhs.shaderInt8 )
&& ( descriptorIndexing == rhs.descriptorIndexing )
&& ( shaderInputAttachmentArrayDynamicIndexing == rhs.shaderInputAttachmentArrayDynamicIndexing )
&& ( shaderUniformTexelBufferArrayDynamicIndexing == rhs.shaderUniformTexelBufferArrayDynamicIndexing )
&& ( shaderStorageTexelBufferArrayDynamicIndexing == rhs.shaderStorageTexelBufferArrayDynamicIndexing )
&& ( shaderUniformBufferArrayNonUniformIndexing == rhs.shaderUniformBufferArrayNonUniformIndexing )
&& ( shaderSampledImageArrayNonUniformIndexing == rhs.shaderSampledImageArrayNonUniformIndexing )
&& ( shaderStorageBufferArrayNonUniformIndexing == rhs.shaderStorageBufferArrayNonUniformIndexing )
&& ( shaderStorageImageArrayNonUniformIndexing == rhs.shaderStorageImageArrayNonUniformIndexing )
&& ( shaderInputAttachmentArrayNonUniformIndexing == rhs.shaderInputAttachmentArrayNonUniformIndexing )
&& ( shaderUniformTexelBufferArrayNonUniformIndexing == rhs.shaderUniformTexelBufferArrayNonUniformIndexing )
&& ( shaderStorageTexelBufferArrayNonUniformIndexing == rhs.shaderStorageTexelBufferArrayNonUniformIndexing )
&& ( descriptorBindingUniformBufferUpdateAfterBind == rhs.descriptorBindingUniformBufferUpdateAfterBind )
&& ( descriptorBindingSampledImageUpdateAfterBind == rhs.descriptorBindingSampledImageUpdateAfterBind )
&& ( descriptorBindingStorageImageUpdateAfterBind == rhs.descriptorBindingStorageImageUpdateAfterBind )
&& ( descriptorBindingStorageBufferUpdateAfterBind == rhs.descriptorBindingStorageBufferUpdateAfterBind )
&& ( descriptorBindingUniformTexelBufferUpdateAfterBind == rhs.descriptorBindingUniformTexelBufferUpdateAfterBind )
&& ( descriptorBindingStorageTexelBufferUpdateAfterBind == rhs.descriptorBindingStorageTexelBufferUpdateAfterBind )
&& ( descriptorBindingUpdateUnusedWhilePending == rhs.descriptorBindingUpdateUnusedWhilePending )
&& ( descriptorBindingPartiallyBound == rhs.descriptorBindingPartiallyBound )
&& ( descriptorBindingVariableDescriptorCount == rhs.descriptorBindingVariableDescriptorCount )
&& ( runtimeDescriptorArray == rhs.runtimeDescriptorArray )
&& ( samplerFilterMinmax == rhs.samplerFilterMinmax )
&& ( scalarBlockLayout == rhs.scalarBlockLayout )
&& ( imagelessFramebuffer == rhs.imagelessFramebuffer )
&& ( uniformBufferStandardLayout == rhs.uniformBufferStandardLayout )
&& ( shaderSubgroupExtendedTypes == rhs.shaderSubgroupExtendedTypes )
&& ( separateDepthStencilLayouts == rhs.separateDepthStencilLayouts )
  57159. && ( hostQueryReset == rhs.hostQueryReset )
  57160. && ( timelineSemaphore == rhs.timelineSemaphore )
  57161. && ( bufferDeviceAddress == rhs.bufferDeviceAddress )
  57162. && ( bufferDeviceAddressCaptureReplay == rhs.bufferDeviceAddressCaptureReplay )
  57163. && ( bufferDeviceAddressMultiDevice == rhs.bufferDeviceAddressMultiDevice )
  57164. && ( vulkanMemoryModel == rhs.vulkanMemoryModel )
  57165. && ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope )
  57166. && ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains )
  57167. && ( shaderOutputViewportIndex == rhs.shaderOutputViewportIndex )
  57168. && ( shaderOutputLayer == rhs.shaderOutputLayer )
  57169. && ( subgroupBroadcastDynamicId == rhs.subgroupBroadcastDynamicId );
  57170. }
  57171. bool operator!=( PhysicalDeviceVulkan12Features const& rhs ) const VULKAN_HPP_NOEXCEPT
  57172. {
  57173. return !operator==( rhs );
  57174. }
  57175. #endif
  57176. public:
  57177. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Features;
  57178. void* pNext = {};
  57179. VULKAN_HPP_NAMESPACE::Bool32 samplerMirrorClampToEdge = {};
  57180. VULKAN_HPP_NAMESPACE::Bool32 drawIndirectCount = {};
  57181. VULKAN_HPP_NAMESPACE::Bool32 storageBuffer8BitAccess = {};
  57182. VULKAN_HPP_NAMESPACE::Bool32 uniformAndStorageBuffer8BitAccess = {};
  57183. VULKAN_HPP_NAMESPACE::Bool32 storagePushConstant8 = {};
  57184. VULKAN_HPP_NAMESPACE::Bool32 shaderBufferInt64Atomics = {};
  57185. VULKAN_HPP_NAMESPACE::Bool32 shaderSharedInt64Atomics = {};
  57186. VULKAN_HPP_NAMESPACE::Bool32 shaderFloat16 = {};
  57187. VULKAN_HPP_NAMESPACE::Bool32 shaderInt8 = {};
  57188. VULKAN_HPP_NAMESPACE::Bool32 descriptorIndexing = {};
  57189. VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayDynamicIndexing = {};
  57190. VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayDynamicIndexing = {};
  57191. VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayDynamicIndexing = {};
  57192. VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexing = {};
  57193. VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexing = {};
  57194. VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexing = {};
  57195. VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexing = {};
  57196. VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexing = {};
  57197. VULKAN_HPP_NAMESPACE::Bool32 shaderUniformTexelBufferArrayNonUniformIndexing = {};
  57198. VULKAN_HPP_NAMESPACE::Bool32 shaderStorageTexelBufferArrayNonUniformIndexing = {};
  57199. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformBufferUpdateAfterBind = {};
  57200. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingSampledImageUpdateAfterBind = {};
  57201. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageImageUpdateAfterBind = {};
  57202. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageBufferUpdateAfterBind = {};
  57203. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUniformTexelBufferUpdateAfterBind = {};
  57204. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingStorageTexelBufferUpdateAfterBind = {};
  57205. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingUpdateUnusedWhilePending = {};
  57206. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingPartiallyBound = {};
  57207. VULKAN_HPP_NAMESPACE::Bool32 descriptorBindingVariableDescriptorCount = {};
  57208. VULKAN_HPP_NAMESPACE::Bool32 runtimeDescriptorArray = {};
  57209. VULKAN_HPP_NAMESPACE::Bool32 samplerFilterMinmax = {};
  57210. VULKAN_HPP_NAMESPACE::Bool32 scalarBlockLayout = {};
  57211. VULKAN_HPP_NAMESPACE::Bool32 imagelessFramebuffer = {};
  57212. VULKAN_HPP_NAMESPACE::Bool32 uniformBufferStandardLayout = {};
  57213. VULKAN_HPP_NAMESPACE::Bool32 shaderSubgroupExtendedTypes = {};
  57214. VULKAN_HPP_NAMESPACE::Bool32 separateDepthStencilLayouts = {};
  57215. VULKAN_HPP_NAMESPACE::Bool32 hostQueryReset = {};
  57216. VULKAN_HPP_NAMESPACE::Bool32 timelineSemaphore = {};
  57217. VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddress = {};
  57218. VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressCaptureReplay = {};
  57219. VULKAN_HPP_NAMESPACE::Bool32 bufferDeviceAddressMultiDevice = {};
  57220. VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
  57221. VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
  57222. VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
  57223. VULKAN_HPP_NAMESPACE::Bool32 shaderOutputViewportIndex = {};
  57224. VULKAN_HPP_NAMESPACE::Bool32 shaderOutputLayer = {};
  57225. VULKAN_HPP_NAMESPACE::Bool32 subgroupBroadcastDynamicId = {};
  57226. };
  57227. static_assert( sizeof( PhysicalDeviceVulkan12Features ) == sizeof( VkPhysicalDeviceVulkan12Features ), "struct and wrapper have different size!" );
  57228. static_assert( std::is_standard_layout<PhysicalDeviceVulkan12Features>::value, "struct wrapper is not a standard layout!" );
  57229. template <>
  57230. struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Features>
  57231. {
  57232. using Type = PhysicalDeviceVulkan12Features;
  57233. };
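// Usage sketch: Vulkan 1.2 features are enabled by linking a PhysicalDeviceVulkan12Features
// instance into the DeviceCreateInfo::pNext chain (after checking support via getFeatures2).
// A minimal sketch, assuming an existing vk::PhysicalDevice named physicalDevice and the
// default "vk" namespace; the variable names are illustrative only:
//
//   vk::PhysicalDeviceVulkan12Features features12;
//   features12.setTimelineSemaphore( VK_TRUE )        // setters return *this and can be chained
//             .setBufferDeviceAddress( VK_TRUE );
//   vk::DeviceCreateInfo deviceCreateInfo;            // queue create infos etc. omitted here
//   deviceCreateInfo.setPNext( &features12 );         // sType is already set by the wrapper
//   vk::Device device = physicalDevice.createDevice( deviceCreateInfo );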
  57234. struct PhysicalDeviceVulkan12Properties
  57235. {
  57236. static const bool allowDuplicate = false;
  57237. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkan12Properties;
  57238. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
57239. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties(VULKAN_HPP_NAMESPACE::DriverId driverID_ = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary, std::array<char,VK_MAX_DRIVER_NAME_SIZE> const& driverName_ = {}, std::array<char,VK_MAX_DRIVER_INFO_SIZE> const& driverInfo_ = {}, VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion_ = {}, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence_ = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64_ = {}, uint32_t maxUpdateAfterBindDescriptorsInAllPools_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative_ = {}, VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind_ = {}, VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSamplers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages_ = {}, uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments_ = {}, uint32_t maxPerStageUpdateAfterBindResources_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSamplers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ = {}, uint32_t maxDescriptorSetUpdateAfterBindSampledImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindStorageImages_ = {}, uint32_t maxDescriptorSetUpdateAfterBindInputAttachments_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes_ = {}, VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone_ = {}, VULKAN_HPP_NAMESPACE::Bool32 independentResolve_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats_ = {}, VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping_ = {}, uint64_t maxTimelineSemaphoreValueDifference_ = {}, VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts_ = {}) VULKAN_HPP_NOEXCEPT
57240. : driverID( driverID_ ), driverName( driverName_ ), driverInfo( driverInfo_ ), conformanceVersion( conformanceVersion_ ), denormBehaviorIndependence( denormBehaviorIndependence_ ), roundingModeIndependence( roundingModeIndependence_ ), shaderSignedZeroInfNanPreserveFloat16( shaderSignedZeroInfNanPreserveFloat16_ ), shaderSignedZeroInfNanPreserveFloat32( shaderSignedZeroInfNanPreserveFloat32_ ), shaderSignedZeroInfNanPreserveFloat64( shaderSignedZeroInfNanPreserveFloat64_ ), shaderDenormPreserveFloat16( shaderDenormPreserveFloat16_ ), shaderDenormPreserveFloat32( shaderDenormPreserveFloat32_ ), shaderDenormPreserveFloat64( shaderDenormPreserveFloat64_ ), shaderDenormFlushToZeroFloat16( shaderDenormFlushToZeroFloat16_ ), shaderDenormFlushToZeroFloat32( shaderDenormFlushToZeroFloat32_ ), shaderDenormFlushToZeroFloat64( shaderDenormFlushToZeroFloat64_ ), shaderRoundingModeRTEFloat16( shaderRoundingModeRTEFloat16_ ), shaderRoundingModeRTEFloat32( shaderRoundingModeRTEFloat32_ ), shaderRoundingModeRTEFloat64( shaderRoundingModeRTEFloat64_ ), shaderRoundingModeRTZFloat16( shaderRoundingModeRTZFloat16_ ), shaderRoundingModeRTZFloat32( shaderRoundingModeRTZFloat32_ ), shaderRoundingModeRTZFloat64( shaderRoundingModeRTZFloat64_ ), maxUpdateAfterBindDescriptorsInAllPools( maxUpdateAfterBindDescriptorsInAllPools_ ), shaderUniformBufferArrayNonUniformIndexingNative( shaderUniformBufferArrayNonUniformIndexingNative_ ), shaderSampledImageArrayNonUniformIndexingNative( shaderSampledImageArrayNonUniformIndexingNative_ ), shaderStorageBufferArrayNonUniformIndexingNative( shaderStorageBufferArrayNonUniformIndexingNative_ ), shaderStorageImageArrayNonUniformIndexingNative( shaderStorageImageArrayNonUniformIndexingNative_ ), shaderInputAttachmentArrayNonUniformIndexingNative( shaderInputAttachmentArrayNonUniformIndexingNative_ ), robustBufferAccessUpdateAfterBind( robustBufferAccessUpdateAfterBind_ ), quadDivergentImplicitLod( quadDivergentImplicitLod_ ), maxPerStageDescriptorUpdateAfterBindSamplers( maxPerStageDescriptorUpdateAfterBindSamplers_ ), maxPerStageDescriptorUpdateAfterBindUniformBuffers( maxPerStageDescriptorUpdateAfterBindUniformBuffers_ ), maxPerStageDescriptorUpdateAfterBindStorageBuffers( maxPerStageDescriptorUpdateAfterBindStorageBuffers_ ), maxPerStageDescriptorUpdateAfterBindSampledImages( maxPerStageDescriptorUpdateAfterBindSampledImages_ ), maxPerStageDescriptorUpdateAfterBindStorageImages( maxPerStageDescriptorUpdateAfterBindStorageImages_ ), maxPerStageDescriptorUpdateAfterBindInputAttachments( maxPerStageDescriptorUpdateAfterBindInputAttachments_ ), maxPerStageUpdateAfterBindResources( maxPerStageUpdateAfterBindResources_ ), maxDescriptorSetUpdateAfterBindSamplers( maxDescriptorSetUpdateAfterBindSamplers_ ), maxDescriptorSetUpdateAfterBindUniformBuffers( maxDescriptorSetUpdateAfterBindUniformBuffers_ ), maxDescriptorSetUpdateAfterBindUniformBuffersDynamic( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindStorageBuffers( maxDescriptorSetUpdateAfterBindStorageBuffers_ ), maxDescriptorSetUpdateAfterBindStorageBuffersDynamic( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic_ ), maxDescriptorSetUpdateAfterBindSampledImages( maxDescriptorSetUpdateAfterBindSampledImages_ ), maxDescriptorSetUpdateAfterBindStorageImages( maxDescriptorSetUpdateAfterBindStorageImages_ ), maxDescriptorSetUpdateAfterBindInputAttachments( maxDescriptorSetUpdateAfterBindInputAttachments_ ), supportedDepthResolveModes( supportedDepthResolveModes_ ), supportedStencilResolveModes( supportedStencilResolveModes_ ), independentResolveNone( independentResolveNone_ ), independentResolve( independentResolve_ ), filterMinmaxSingleComponentFormats( filterMinmaxSingleComponentFormats_ ), filterMinmaxImageComponentMapping( filterMinmaxImageComponentMapping_ ), maxTimelineSemaphoreValueDifference( maxTimelineSemaphoreValueDifference_ ), framebufferIntegerColorSampleCounts( framebufferIntegerColorSampleCounts_ )
  57241. {}
  57242. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  57243. PhysicalDeviceVulkan12Properties( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
  57244. : PhysicalDeviceVulkan12Properties( *reinterpret_cast<PhysicalDeviceVulkan12Properties const *>( &rhs ) )
  57245. {}
  57246. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  57247. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkan12Properties & operator=( PhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  57248. PhysicalDeviceVulkan12Properties & operator=( VkPhysicalDeviceVulkan12Properties const & rhs ) VULKAN_HPP_NOEXCEPT
  57249. {
  57250. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties const *>( &rhs );
  57251. return *this;
  57252. }
  57253. operator VkPhysicalDeviceVulkan12Properties const&() const VULKAN_HPP_NOEXCEPT
  57254. {
  57255. return *reinterpret_cast<const VkPhysicalDeviceVulkan12Properties*>( this );
  57256. }
  57257. operator VkPhysicalDeviceVulkan12Properties &() VULKAN_HPP_NOEXCEPT
  57258. {
  57259. return *reinterpret_cast<VkPhysicalDeviceVulkan12Properties*>( this );
  57260. }
  57261. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  57262. auto operator<=>( PhysicalDeviceVulkan12Properties const& ) const = default;
  57263. #else
  57264. bool operator==( PhysicalDeviceVulkan12Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
  57265. {
  57266. return ( sType == rhs.sType )
  57267. && ( pNext == rhs.pNext )
  57268. && ( driverID == rhs.driverID )
  57269. && ( driverName == rhs.driverName )
  57270. && ( driverInfo == rhs.driverInfo )
  57271. && ( conformanceVersion == rhs.conformanceVersion )
  57272. && ( denormBehaviorIndependence == rhs.denormBehaviorIndependence )
  57273. && ( roundingModeIndependence == rhs.roundingModeIndependence )
  57274. && ( shaderSignedZeroInfNanPreserveFloat16 == rhs.shaderSignedZeroInfNanPreserveFloat16 )
  57275. && ( shaderSignedZeroInfNanPreserveFloat32 == rhs.shaderSignedZeroInfNanPreserveFloat32 )
  57276. && ( shaderSignedZeroInfNanPreserveFloat64 == rhs.shaderSignedZeroInfNanPreserveFloat64 )
  57277. && ( shaderDenormPreserveFloat16 == rhs.shaderDenormPreserveFloat16 )
  57278. && ( shaderDenormPreserveFloat32 == rhs.shaderDenormPreserveFloat32 )
  57279. && ( shaderDenormPreserveFloat64 == rhs.shaderDenormPreserveFloat64 )
  57280. && ( shaderDenormFlushToZeroFloat16 == rhs.shaderDenormFlushToZeroFloat16 )
  57281. && ( shaderDenormFlushToZeroFloat32 == rhs.shaderDenormFlushToZeroFloat32 )
  57282. && ( shaderDenormFlushToZeroFloat64 == rhs.shaderDenormFlushToZeroFloat64 )
  57283. && ( shaderRoundingModeRTEFloat16 == rhs.shaderRoundingModeRTEFloat16 )
  57284. && ( shaderRoundingModeRTEFloat32 == rhs.shaderRoundingModeRTEFloat32 )
  57285. && ( shaderRoundingModeRTEFloat64 == rhs.shaderRoundingModeRTEFloat64 )
  57286. && ( shaderRoundingModeRTZFloat16 == rhs.shaderRoundingModeRTZFloat16 )
  57287. && ( shaderRoundingModeRTZFloat32 == rhs.shaderRoundingModeRTZFloat32 )
  57288. && ( shaderRoundingModeRTZFloat64 == rhs.shaderRoundingModeRTZFloat64 )
  57289. && ( maxUpdateAfterBindDescriptorsInAllPools == rhs.maxUpdateAfterBindDescriptorsInAllPools )
  57290. && ( shaderUniformBufferArrayNonUniformIndexingNative == rhs.shaderUniformBufferArrayNonUniformIndexingNative )
  57291. && ( shaderSampledImageArrayNonUniformIndexingNative == rhs.shaderSampledImageArrayNonUniformIndexingNative )
  57292. && ( shaderStorageBufferArrayNonUniformIndexingNative == rhs.shaderStorageBufferArrayNonUniformIndexingNative )
  57293. && ( shaderStorageImageArrayNonUniformIndexingNative == rhs.shaderStorageImageArrayNonUniformIndexingNative )
  57294. && ( shaderInputAttachmentArrayNonUniformIndexingNative == rhs.shaderInputAttachmentArrayNonUniformIndexingNative )
  57295. && ( robustBufferAccessUpdateAfterBind == rhs.robustBufferAccessUpdateAfterBind )
  57296. && ( quadDivergentImplicitLod == rhs.quadDivergentImplicitLod )
  57297. && ( maxPerStageDescriptorUpdateAfterBindSamplers == rhs.maxPerStageDescriptorUpdateAfterBindSamplers )
  57298. && ( maxPerStageDescriptorUpdateAfterBindUniformBuffers == rhs.maxPerStageDescriptorUpdateAfterBindUniformBuffers )
  57299. && ( maxPerStageDescriptorUpdateAfterBindStorageBuffers == rhs.maxPerStageDescriptorUpdateAfterBindStorageBuffers )
  57300. && ( maxPerStageDescriptorUpdateAfterBindSampledImages == rhs.maxPerStageDescriptorUpdateAfterBindSampledImages )
  57301. && ( maxPerStageDescriptorUpdateAfterBindStorageImages == rhs.maxPerStageDescriptorUpdateAfterBindStorageImages )
  57302. && ( maxPerStageDescriptorUpdateAfterBindInputAttachments == rhs.maxPerStageDescriptorUpdateAfterBindInputAttachments )
  57303. && ( maxPerStageUpdateAfterBindResources == rhs.maxPerStageUpdateAfterBindResources )
  57304. && ( maxDescriptorSetUpdateAfterBindSamplers == rhs.maxDescriptorSetUpdateAfterBindSamplers )
  57305. && ( maxDescriptorSetUpdateAfterBindUniformBuffers == rhs.maxDescriptorSetUpdateAfterBindUniformBuffers )
  57306. && ( maxDescriptorSetUpdateAfterBindUniformBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindUniformBuffersDynamic )
  57307. && ( maxDescriptorSetUpdateAfterBindStorageBuffers == rhs.maxDescriptorSetUpdateAfterBindStorageBuffers )
  57308. && ( maxDescriptorSetUpdateAfterBindStorageBuffersDynamic == rhs.maxDescriptorSetUpdateAfterBindStorageBuffersDynamic )
  57309. && ( maxDescriptorSetUpdateAfterBindSampledImages == rhs.maxDescriptorSetUpdateAfterBindSampledImages )
  57310. && ( maxDescriptorSetUpdateAfterBindStorageImages == rhs.maxDescriptorSetUpdateAfterBindStorageImages )
  57311. && ( maxDescriptorSetUpdateAfterBindInputAttachments == rhs.maxDescriptorSetUpdateAfterBindInputAttachments )
  57312. && ( supportedDepthResolveModes == rhs.supportedDepthResolveModes )
  57313. && ( supportedStencilResolveModes == rhs.supportedStencilResolveModes )
  57314. && ( independentResolveNone == rhs.independentResolveNone )
  57315. && ( independentResolve == rhs.independentResolve )
  57316. && ( filterMinmaxSingleComponentFormats == rhs.filterMinmaxSingleComponentFormats )
  57317. && ( filterMinmaxImageComponentMapping == rhs.filterMinmaxImageComponentMapping )
  57318. && ( maxTimelineSemaphoreValueDifference == rhs.maxTimelineSemaphoreValueDifference )
  57319. && ( framebufferIntegerColorSampleCounts == rhs.framebufferIntegerColorSampleCounts );
  57320. }
  57321. bool operator!=( PhysicalDeviceVulkan12Properties const& rhs ) const VULKAN_HPP_NOEXCEPT
  57322. {
  57323. return !operator==( rhs );
  57324. }
  57325. #endif
  57326. public:
  57327. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkan12Properties;
  57328. void* pNext = {};
  57329. VULKAN_HPP_NAMESPACE::DriverId driverID = VULKAN_HPP_NAMESPACE::DriverId::eAmdProprietary;
  57330. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_NAME_SIZE> driverName = {};
  57331. VULKAN_HPP_NAMESPACE::ArrayWrapper1D<char, VK_MAX_DRIVER_INFO_SIZE> driverInfo = {};
  57332. VULKAN_HPP_NAMESPACE::ConformanceVersion conformanceVersion = {};
  57333. VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence denormBehaviorIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
  57334. VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence roundingModeIndependence = VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence::e32BitOnly;
  57335. VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat16 = {};
  57336. VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat32 = {};
  57337. VULKAN_HPP_NAMESPACE::Bool32 shaderSignedZeroInfNanPreserveFloat64 = {};
  57338. VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat16 = {};
  57339. VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat32 = {};
  57340. VULKAN_HPP_NAMESPACE::Bool32 shaderDenormPreserveFloat64 = {};
  57341. VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat16 = {};
  57342. VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat32 = {};
  57343. VULKAN_HPP_NAMESPACE::Bool32 shaderDenormFlushToZeroFloat64 = {};
  57344. VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat16 = {};
  57345. VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat32 = {};
  57346. VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTEFloat64 = {};
  57347. VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat16 = {};
  57348. VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat32 = {};
  57349. VULKAN_HPP_NAMESPACE::Bool32 shaderRoundingModeRTZFloat64 = {};
  57350. uint32_t maxUpdateAfterBindDescriptorsInAllPools = {};
  57351. VULKAN_HPP_NAMESPACE::Bool32 shaderUniformBufferArrayNonUniformIndexingNative = {};
  57352. VULKAN_HPP_NAMESPACE::Bool32 shaderSampledImageArrayNonUniformIndexingNative = {};
  57353. VULKAN_HPP_NAMESPACE::Bool32 shaderStorageBufferArrayNonUniformIndexingNative = {};
  57354. VULKAN_HPP_NAMESPACE::Bool32 shaderStorageImageArrayNonUniformIndexingNative = {};
  57355. VULKAN_HPP_NAMESPACE::Bool32 shaderInputAttachmentArrayNonUniformIndexingNative = {};
  57356. VULKAN_HPP_NAMESPACE::Bool32 robustBufferAccessUpdateAfterBind = {};
  57357. VULKAN_HPP_NAMESPACE::Bool32 quadDivergentImplicitLod = {};
  57358. uint32_t maxPerStageDescriptorUpdateAfterBindSamplers = {};
  57359. uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers = {};
  57360. uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers = {};
  57361. uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages = {};
  57362. uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages = {};
  57363. uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments = {};
  57364. uint32_t maxPerStageUpdateAfterBindResources = {};
  57365. uint32_t maxDescriptorSetUpdateAfterBindSamplers = {};
  57366. uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers = {};
  57367. uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = {};
  57368. uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers = {};
  57369. uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = {};
  57370. uint32_t maxDescriptorSetUpdateAfterBindSampledImages = {};
  57371. uint32_t maxDescriptorSetUpdateAfterBindStorageImages = {};
  57372. uint32_t maxDescriptorSetUpdateAfterBindInputAttachments = {};
  57373. VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedDepthResolveModes = {};
  57374. VULKAN_HPP_NAMESPACE::ResolveModeFlags supportedStencilResolveModes = {};
  57375. VULKAN_HPP_NAMESPACE::Bool32 independentResolveNone = {};
  57376. VULKAN_HPP_NAMESPACE::Bool32 independentResolve = {};
  57377. VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxSingleComponentFormats = {};
  57378. VULKAN_HPP_NAMESPACE::Bool32 filterMinmaxImageComponentMapping = {};
  57379. uint64_t maxTimelineSemaphoreValueDifference = {};
  57380. VULKAN_HPP_NAMESPACE::SampleCountFlags framebufferIntegerColorSampleCounts = {};
  57381. };
  57382. static_assert( sizeof( PhysicalDeviceVulkan12Properties ) == sizeof( VkPhysicalDeviceVulkan12Properties ), "struct and wrapper have different size!" );
  57383. static_assert( std::is_standard_layout<PhysicalDeviceVulkan12Properties>::value, "struct wrapper is not a standard layout!" );
  57384. template <>
  57385. struct CppType<StructureType, StructureType::ePhysicalDeviceVulkan12Properties>
  57386. {
  57387. using Type = PhysicalDeviceVulkan12Properties;
  57388. };
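// Usage sketch: PhysicalDeviceVulkan12Properties is read-only data filled in by the implementation
// and is usually retrieved through a structure chain. A minimal sketch, assuming an existing
// vk::PhysicalDevice named physicalDevice and the default enhanced mode; names are illustrative:
//
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceVulkan12Properties>();
//   auto const & props12 = chain.get<vk::PhysicalDeviceVulkan12Properties>();
//   const char * driver  = props12.driverName.data();     // ArrayWrapper1D derives from std::array
//   uint64_t     maxDiff = props12.maxTimelineSemaphoreValueDifference;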
  57389. struct PhysicalDeviceVulkanMemoryModelFeatures
  57390. {
  57391. static const bool allowDuplicate = false;
  57392. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
  57393. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  57394. VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures(VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ = {}, VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ = {}) VULKAN_HPP_NOEXCEPT
  57395. : vulkanMemoryModel( vulkanMemoryModel_ ), vulkanMemoryModelDeviceScope( vulkanMemoryModelDeviceScope_ ), vulkanMemoryModelAvailabilityVisibilityChains( vulkanMemoryModelAvailabilityVisibilityChains_ )
  57396. {}
  57397. VULKAN_HPP_CONSTEXPR PhysicalDeviceVulkanMemoryModelFeatures( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  57398. PhysicalDeviceVulkanMemoryModelFeatures( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  57399. : PhysicalDeviceVulkanMemoryModelFeatures( *reinterpret_cast<PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs ) )
  57400. {}
  57401. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  57402. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceVulkanMemoryModelFeatures & operator=( PhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  57403. PhysicalDeviceVulkanMemoryModelFeatures & operator=( VkPhysicalDeviceVulkanMemoryModelFeatures const & rhs ) VULKAN_HPP_NOEXCEPT
  57404. {
  57405. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures const *>( &rhs );
  57406. return *this;
  57407. }
  57408. PhysicalDeviceVulkanMemoryModelFeatures & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  57409. {
  57410. pNext = pNext_;
  57411. return *this;
  57412. }
  57413. PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModel( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel_ ) VULKAN_HPP_NOEXCEPT
  57414. {
  57415. vulkanMemoryModel = vulkanMemoryModel_;
  57416. return *this;
  57417. }
  57418. PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelDeviceScope( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope_ ) VULKAN_HPP_NOEXCEPT
  57419. {
  57420. vulkanMemoryModelDeviceScope = vulkanMemoryModelDeviceScope_;
  57421. return *this;
  57422. }
  57423. PhysicalDeviceVulkanMemoryModelFeatures & setVulkanMemoryModelAvailabilityVisibilityChains( VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains_ ) VULKAN_HPP_NOEXCEPT
  57424. {
  57425. vulkanMemoryModelAvailabilityVisibilityChains = vulkanMemoryModelAvailabilityVisibilityChains_;
  57426. return *this;
  57427. }
  57428. operator VkPhysicalDeviceVulkanMemoryModelFeatures const&() const VULKAN_HPP_NOEXCEPT
  57429. {
  57430. return *reinterpret_cast<const VkPhysicalDeviceVulkanMemoryModelFeatures*>( this );
  57431. }
  57432. operator VkPhysicalDeviceVulkanMemoryModelFeatures &() VULKAN_HPP_NOEXCEPT
  57433. {
  57434. return *reinterpret_cast<VkPhysicalDeviceVulkanMemoryModelFeatures*>( this );
  57435. }
  57436. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  57437. auto operator<=>( PhysicalDeviceVulkanMemoryModelFeatures const& ) const = default;
  57438. #else
  57439. bool operator==( PhysicalDeviceVulkanMemoryModelFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  57440. {
  57441. return ( sType == rhs.sType )
  57442. && ( pNext == rhs.pNext )
  57443. && ( vulkanMemoryModel == rhs.vulkanMemoryModel )
  57444. && ( vulkanMemoryModelDeviceScope == rhs.vulkanMemoryModelDeviceScope )
  57445. && ( vulkanMemoryModelAvailabilityVisibilityChains == rhs.vulkanMemoryModelAvailabilityVisibilityChains );
  57446. }
  57447. bool operator!=( PhysicalDeviceVulkanMemoryModelFeatures const& rhs ) const VULKAN_HPP_NOEXCEPT
  57448. {
  57449. return !operator==( rhs );
  57450. }
  57451. #endif
  57452. public:
  57453. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceVulkanMemoryModelFeatures;
  57454. void* pNext = {};
  57455. VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModel = {};
  57456. VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelDeviceScope = {};
  57457. VULKAN_HPP_NAMESPACE::Bool32 vulkanMemoryModelAvailabilityVisibilityChains = {};
  57458. };
  57459. static_assert( sizeof( PhysicalDeviceVulkanMemoryModelFeatures ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeatures ), "struct and wrapper have different size!" );
  57460. static_assert( std::is_standard_layout<PhysicalDeviceVulkanMemoryModelFeatures>::value, "struct wrapper is not a standard layout!" );
  57461. template <>
  57462. struct CppType<StructureType, StructureType::ePhysicalDeviceVulkanMemoryModelFeatures>
  57463. {
  57464. using Type = PhysicalDeviceVulkanMemoryModelFeatures;
  57465. };
  57466. using PhysicalDeviceVulkanMemoryModelFeaturesKHR = PhysicalDeviceVulkanMemoryModelFeatures;
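// Usage sketch: the memory model features were promoted to Vulkan 1.2 from VK_KHR_vulkan_memory_model,
// so the KHR alias above refers to the same wrapper type. A minimal, illustrative construction:
//
//   vk::PhysicalDeviceVulkanMemoryModelFeatures memoryModelFeatures;
//   memoryModelFeatures.setVulkanMemoryModel( VK_TRUE )
//                      .setVulkanMemoryModelDeviceScope( VK_TRUE );
//   // link into DeviceCreateInfo::pNext in the same way as the other feature structs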
  57467. struct PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR
  57468. {
  57469. static const bool allowDuplicate = false;
  57470. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
  57471. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  57472. VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ = {}, VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ = {}, VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ = {}) VULKAN_HPP_NOEXCEPT
  57473. : workgroupMemoryExplicitLayout( workgroupMemoryExplicitLayout_ ), workgroupMemoryExplicitLayoutScalarBlockLayout( workgroupMemoryExplicitLayoutScalarBlockLayout_ ), workgroupMemoryExplicitLayout8BitAccess( workgroupMemoryExplicitLayout8BitAccess_ ), workgroupMemoryExplicitLayout16BitAccess( workgroupMemoryExplicitLayout16BitAccess_ )
  57474. {}
  57475. VULKAN_HPP_CONSTEXPR PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  57476. PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  57477. : PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR( *reinterpret_cast<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>( &rhs ) )
  57478. {}
  57479. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  57480. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & operator=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  57481. PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & operator=( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  57482. {
  57483. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const *>( &rhs );
  57484. return *this;
  57485. }
  57486. PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  57487. {
  57488. pNext = pNext_;
  57489. return *this;
  57490. }
  57491. PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout_ ) VULKAN_HPP_NOEXCEPT
  57492. {
  57493. workgroupMemoryExplicitLayout = workgroupMemoryExplicitLayout_;
  57494. return *this;
  57495. }
  57496. PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayoutScalarBlockLayout( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout_ ) VULKAN_HPP_NOEXCEPT
  57497. {
  57498. workgroupMemoryExplicitLayoutScalarBlockLayout = workgroupMemoryExplicitLayoutScalarBlockLayout_;
  57499. return *this;
  57500. }
  57501. PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout8BitAccess( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess_ ) VULKAN_HPP_NOEXCEPT
  57502. {
  57503. workgroupMemoryExplicitLayout8BitAccess = workgroupMemoryExplicitLayout8BitAccess_;
  57504. return *this;
  57505. }
  57506. PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR & setWorkgroupMemoryExplicitLayout16BitAccess( VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess_ ) VULKAN_HPP_NOEXCEPT
  57507. {
  57508. workgroupMemoryExplicitLayout16BitAccess = workgroupMemoryExplicitLayout16BitAccess_;
  57509. return *this;
  57510. }
  57511. operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
  57512. {
  57513. return *reinterpret_cast<const VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR*>( this );
  57514. }
  57515. operator VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR &() VULKAN_HPP_NOEXCEPT
  57516. {
  57517. return *reinterpret_cast<VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR*>( this );
  57518. }
  57519. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  57520. auto operator<=>( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const& ) const = default;
  57521. #else
  57522. bool operator==( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  57523. {
  57524. return ( sType == rhs.sType )
  57525. && ( pNext == rhs.pNext )
  57526. && ( workgroupMemoryExplicitLayout == rhs.workgroupMemoryExplicitLayout )
  57527. && ( workgroupMemoryExplicitLayoutScalarBlockLayout == rhs.workgroupMemoryExplicitLayoutScalarBlockLayout )
  57528. && ( workgroupMemoryExplicitLayout8BitAccess == rhs.workgroupMemoryExplicitLayout8BitAccess )
  57529. && ( workgroupMemoryExplicitLayout16BitAccess == rhs.workgroupMemoryExplicitLayout16BitAccess );
  57530. }
  57531. bool operator!=( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  57532. {
  57533. return !operator==( rhs );
  57534. }
  57535. #endif
  57536. public:
  57537. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
  57538. void* pNext = {};
  57539. VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout = {};
  57540. VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayoutScalarBlockLayout = {};
  57541. VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout8BitAccess = {};
  57542. VULKAN_HPP_NAMESPACE::Bool32 workgroupMemoryExplicitLayout16BitAccess = {};
  57543. };
  57544. static_assert( sizeof( PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ) == sizeof( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ), "struct and wrapper have different size!" );
  57545. static_assert( std::is_standard_layout<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
  57546. template <>
  57547. struct CppType<StructureType, StructureType::ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>
  57548. {
  57549. using Type = PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
  57550. };
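// Usage sketch: support for VK_KHR_workgroup_memory_explicit_layout is queried like any other
// extension feature struct; several feature structs can be read in one call by listing them in a
// single StructureChain. A minimal sketch, assuming an existing vk::PhysicalDevice named physicalDevice:
//
//   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
//                                            vk::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>();
//   auto const & wgmel = chain.get<vk::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>();
//   bool has16BitAccess = ( wgmel.workgroupMemoryExplicitLayout16BitAccess == VK_TRUE );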
  57551. struct PhysicalDeviceYcbcrImageArraysFeaturesEXT
  57552. {
  57553. static const bool allowDuplicate = false;
  57554. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
  57555. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  57556. VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT(VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ = {}) VULKAN_HPP_NOEXCEPT
  57557. : ycbcrImageArrays( ycbcrImageArrays_ )
  57558. {}
  57559. VULKAN_HPP_CONSTEXPR PhysicalDeviceYcbcrImageArraysFeaturesEXT( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  57560. PhysicalDeviceYcbcrImageArraysFeaturesEXT( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  57561. : PhysicalDeviceYcbcrImageArraysFeaturesEXT( *reinterpret_cast<PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs ) )
  57562. {}
  57563. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  57564. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  57565. PhysicalDeviceYcbcrImageArraysFeaturesEXT & operator=( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  57566. {
  57567. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT const *>( &rhs );
  57568. return *this;
  57569. }
  57570. PhysicalDeviceYcbcrImageArraysFeaturesEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  57571. {
  57572. pNext = pNext_;
  57573. return *this;
  57574. }
  57575. PhysicalDeviceYcbcrImageArraysFeaturesEXT & setYcbcrImageArrays( VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays_ ) VULKAN_HPP_NOEXCEPT
  57576. {
  57577. ycbcrImageArrays = ycbcrImageArrays_;
  57578. return *this;
  57579. }
  57580. operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  57581. {
  57582. return *reinterpret_cast<const VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this );
  57583. }
  57584. operator VkPhysicalDeviceYcbcrImageArraysFeaturesEXT &() VULKAN_HPP_NOEXCEPT
  57585. {
  57586. return *reinterpret_cast<VkPhysicalDeviceYcbcrImageArraysFeaturesEXT*>( this );
  57587. }
  57588. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  57589. auto operator<=>( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& ) const = default;
  57590. #else
  57591. bool operator==( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  57592. {
  57593. return ( sType == rhs.sType )
  57594. && ( pNext == rhs.pNext )
  57595. && ( ycbcrImageArrays == rhs.ycbcrImageArrays );
  57596. }
  57597. bool operator!=( PhysicalDeviceYcbcrImageArraysFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  57598. {
  57599. return !operator==( rhs );
  57600. }
  57601. #endif
  57602. public:
  57603. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT;
  57604. void* pNext = {};
  57605. VULKAN_HPP_NAMESPACE::Bool32 ycbcrImageArrays = {};
  57606. };
  57607. static_assert( sizeof( PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ), "struct and wrapper have different size!" );
  57608. static_assert( std::is_standard_layout<PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  57609. template <>
  57610. struct CppType<StructureType, StructureType::ePhysicalDeviceYcbcrImageArraysFeaturesEXT>
  57611. {
  57612. using Type = PhysicalDeviceYcbcrImageArraysFeaturesEXT;
  57613. };
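// Usage sketch: like the other *FeaturesEXT structs, this one is enabled by appending it to the
// DeviceCreateInfo::pNext chain; multiple feature structs can be linked one after another. A minimal,
// illustrative sketch (extension enabling and queue setup omitted):
//
//   vk::PhysicalDeviceYcbcrImageArraysFeaturesEXT ycbcrArrays( VK_TRUE );
//   vk::PhysicalDeviceFeatures2 features2;
//   features2.setPNext( &ycbcrArrays );            // features2 -> ycbcrArrays
//   vk::DeviceCreateInfo deviceCreateInfo;
//   deviceCreateInfo.setPNext( &features2 );       // deviceCreateInfo -> features2 -> ycbcrArrays
//   // when PhysicalDeviceFeatures2 is chained, DeviceCreateInfo::pEnabledFeatures must stay nullptr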
  57614. struct PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR
  57615. {
  57616. static const bool allowDuplicate = false;
  57617. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR;
  57618. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  57619. VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR(VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ = {}) VULKAN_HPP_NOEXCEPT
  57620. : shaderZeroInitializeWorkgroupMemory( shaderZeroInitializeWorkgroupMemory_ )
  57621. {}
  57622. VULKAN_HPP_CONSTEXPR PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  57623. PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  57624. : PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR( *reinterpret_cast<PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const *>( &rhs ) )
  57625. {}
  57626. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  57627. VULKAN_HPP_CONSTEXPR_14 PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR & operator=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  57628. PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR & operator=( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  57629. {
  57630. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const *>( &rhs );
  57631. return *this;
  57632. }
  57633. PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  57634. {
  57635. pNext = pNext_;
  57636. return *this;
  57637. }
  57638. PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR & setShaderZeroInitializeWorkgroupMemory( VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory_ ) VULKAN_HPP_NOEXCEPT
  57639. {
  57640. shaderZeroInitializeWorkgroupMemory = shaderZeroInitializeWorkgroupMemory_;
  57641. return *this;
  57642. }
  57643. operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const&() const VULKAN_HPP_NOEXCEPT
  57644. {
  57645. return *reinterpret_cast<const VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR*>( this );
  57646. }
  57647. operator VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR &() VULKAN_HPP_NOEXCEPT
  57648. {
  57649. return *reinterpret_cast<VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR*>( this );
  57650. }
  57651. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  57652. auto operator<=>( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const& ) const = default;
  57653. #else
  57654. bool operator==( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  57655. {
  57656. return ( sType == rhs.sType )
  57657. && ( pNext == rhs.pNext )
  57658. && ( shaderZeroInitializeWorkgroupMemory == rhs.shaderZeroInitializeWorkgroupMemory );
  57659. }
  57660. bool operator!=( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  57661. {
  57662. return !operator==( rhs );
  57663. }
  57664. #endif
  57665. public:
  57666. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR;
  57667. void* pNext = {};
  57668. VULKAN_HPP_NAMESPACE::Bool32 shaderZeroInitializeWorkgroupMemory = {};
  57669. };
  57670. static_assert( sizeof( PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR ) == sizeof( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR ), "struct and wrapper have different size!" );
  57671. static_assert( std::is_standard_layout<PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
  57672. template <>
  57673. struct CppType<StructureType, StructureType::ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR>
  57674. {
  57675. using Type = PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR;
  57676. };
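// Usage sketch: every struct wrapper here is layout compatible with its C counterpart, and the
// conversion operators above hand out a reinterpret_cast'ed reference instead of a copy, so a wrapper
// can be passed cheaply to code that uses the plain C API. Illustrative only:
//
//   vk::PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR zeroInit( VK_TRUE );
//   const VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR & cStruct = zeroInit;   // no copy
//   // cStruct.sType already carries the matching VK_STRUCTURE_TYPE_* value set by the wrapper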
  57677. struct PipelineColorBlendAdvancedStateCreateInfoEXT
  57678. {
  57679. static const bool allowDuplicate = false;
  57680. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
  57681. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  57682. VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ = {}, VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated) VULKAN_HPP_NOEXCEPT
  57683. : srcPremultiplied( srcPremultiplied_ ), dstPremultiplied( dstPremultiplied_ ), blendOverlap( blendOverlap_ )
  57684. {}
  57685. VULKAN_HPP_CONSTEXPR PipelineColorBlendAdvancedStateCreateInfoEXT( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  57686. PipelineColorBlendAdvancedStateCreateInfoEXT( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  57687. : PipelineColorBlendAdvancedStateCreateInfoEXT( *reinterpret_cast<PipelineColorBlendAdvancedStateCreateInfoEXT const *>( &rhs ) )
  57688. {}
  57689. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  57690. VULKAN_HPP_CONSTEXPR_14 PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( PipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  57691. PipelineColorBlendAdvancedStateCreateInfoEXT & operator=( VkPipelineColorBlendAdvancedStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  57692. {
  57693. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT const *>( &rhs );
  57694. return *this;
  57695. }
  57696. PipelineColorBlendAdvancedStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  57697. {
  57698. pNext = pNext_;
  57699. return *this;
  57700. }
  57701. PipelineColorBlendAdvancedStateCreateInfoEXT & setSrcPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied_ ) VULKAN_HPP_NOEXCEPT
  57702. {
  57703. srcPremultiplied = srcPremultiplied_;
  57704. return *this;
  57705. }
  57706. PipelineColorBlendAdvancedStateCreateInfoEXT & setDstPremultiplied( VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied_ ) VULKAN_HPP_NOEXCEPT
  57707. {
  57708. dstPremultiplied = dstPremultiplied_;
  57709. return *this;
  57710. }
  57711. PipelineColorBlendAdvancedStateCreateInfoEXT & setBlendOverlap( VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap_ ) VULKAN_HPP_NOEXCEPT
  57712. {
  57713. blendOverlap = blendOverlap_;
  57714. return *this;
  57715. }
  57716. operator VkPipelineColorBlendAdvancedStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  57717. {
  57718. return *reinterpret_cast<const VkPipelineColorBlendAdvancedStateCreateInfoEXT*>( this );
  57719. }
  57720. operator VkPipelineColorBlendAdvancedStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  57721. {
  57722. return *reinterpret_cast<VkPipelineColorBlendAdvancedStateCreateInfoEXT*>( this );
  57723. }
  57724. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  57725. auto operator<=>( PipelineColorBlendAdvancedStateCreateInfoEXT const& ) const = default;
  57726. #else
  57727. bool operator==( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  57728. {
  57729. return ( sType == rhs.sType )
  57730. && ( pNext == rhs.pNext )
  57731. && ( srcPremultiplied == rhs.srcPremultiplied )
  57732. && ( dstPremultiplied == rhs.dstPremultiplied )
  57733. && ( blendOverlap == rhs.blendOverlap );
  57734. }
  57735. bool operator!=( PipelineColorBlendAdvancedStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  57736. {
  57737. return !operator==( rhs );
  57738. }
  57739. #endif
  57740. public:
  57741. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT;
  57742. const void* pNext = {};
  57743. VULKAN_HPP_NAMESPACE::Bool32 srcPremultiplied = {};
  57744. VULKAN_HPP_NAMESPACE::Bool32 dstPremultiplied = {};
  57745. VULKAN_HPP_NAMESPACE::BlendOverlapEXT blendOverlap = VULKAN_HPP_NAMESPACE::BlendOverlapEXT::eUncorrelated;
  57746. };
  57747. static_assert( sizeof( PipelineColorBlendAdvancedStateCreateInfoEXT ) == sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ), "struct and wrapper have different size!" );
  57748. static_assert( std::is_standard_layout<PipelineColorBlendAdvancedStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  57749. template <>
  57750. struct CppType<StructureType, StructureType::ePipelineColorBlendAdvancedStateCreateInfoEXT>
  57751. {
  57752. using Type = PipelineColorBlendAdvancedStateCreateInfoEXT;
  57753. };
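// Usage sketch: PipelineColorBlendAdvancedStateCreateInfoEXT (VK_EXT_blend_operation_advanced) extends
// the regular color blend state by being linked into PipelineColorBlendStateCreateInfo::pNext. A
// minimal, illustrative sketch with the rest of the blend state omitted:
//
//   vk::PipelineColorBlendAdvancedStateCreateInfoEXT advancedBlend;
//   advancedBlend.setSrcPremultiplied( VK_TRUE )
//                .setDstPremultiplied( VK_TRUE )
//                .setBlendOverlap( vk::BlendOverlapEXT::eUncorrelated );
//   vk::PipelineColorBlendStateCreateInfo colorBlendState;
//   colorBlendState.setPNext( &advancedBlend );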
  57754. struct PipelineCompilerControlCreateInfoAMD
  57755. {
  57756. static const bool allowDuplicate = false;
  57757. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCompilerControlCreateInfoAMD;
  57758. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  57759. VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD(VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ = {}) VULKAN_HPP_NOEXCEPT
  57760. : compilerControlFlags( compilerControlFlags_ )
  57761. {}
  57762. VULKAN_HPP_CONSTEXPR PipelineCompilerControlCreateInfoAMD( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  57763. PipelineCompilerControlCreateInfoAMD( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
  57764. : PipelineCompilerControlCreateInfoAMD( *reinterpret_cast<PipelineCompilerControlCreateInfoAMD const *>( &rhs ) )
  57765. {}
  57766. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  57767. VULKAN_HPP_CONSTEXPR_14 PipelineCompilerControlCreateInfoAMD & operator=( PipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  57768. PipelineCompilerControlCreateInfoAMD & operator=( VkPipelineCompilerControlCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
  57769. {
  57770. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD const *>( &rhs );
  57771. return *this;
  57772. }
  57773. PipelineCompilerControlCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  57774. {
  57775. pNext = pNext_;
  57776. return *this;
  57777. }
  57778. PipelineCompilerControlCreateInfoAMD & setCompilerControlFlags( VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags_ ) VULKAN_HPP_NOEXCEPT
  57779. {
  57780. compilerControlFlags = compilerControlFlags_;
  57781. return *this;
  57782. }
  57783. operator VkPipelineCompilerControlCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT
  57784. {
  57785. return *reinterpret_cast<const VkPipelineCompilerControlCreateInfoAMD*>( this );
  57786. }
  57787. operator VkPipelineCompilerControlCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
  57788. {
  57789. return *reinterpret_cast<VkPipelineCompilerControlCreateInfoAMD*>( this );
  57790. }
  57791. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  57792. auto operator<=>( PipelineCompilerControlCreateInfoAMD const& ) const = default;
  57793. #else
  57794. bool operator==( PipelineCompilerControlCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
  57795. {
  57796. return ( sType == rhs.sType )
  57797. && ( pNext == rhs.pNext )
  57798. && ( compilerControlFlags == rhs.compilerControlFlags );
  57799. }
  57800. bool operator!=( PipelineCompilerControlCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
  57801. {
  57802. return !operator==( rhs );
  57803. }
  57804. #endif
  57805. public:
  57806. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCompilerControlCreateInfoAMD;
  57807. const void* pNext = {};
  57808. VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD compilerControlFlags = {};
  57809. };
  57810. static_assert( sizeof( PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ), "struct and wrapper have different size!" );
  57811. static_assert( std::is_standard_layout<PipelineCompilerControlCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
  57812. template <>
  57813. struct CppType<StructureType, StructureType::ePipelineCompilerControlCreateInfoAMD>
  57814. {
  57815. using Type = PipelineCompilerControlCreateInfoAMD;
  57816. };
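// Usage sketch: PipelineCompilerControlCreateInfoAMD (VK_AMD_pipeline_compiler_control) is attached to
// a graphics or compute pipeline create info through its pNext chain. Illustrative only; shader stages,
// layout and render pass setup are omitted:
//
//   vk::PipelineCompilerControlCreateInfoAMD compilerControl;   // compilerControlFlags left at default
//   vk::GraphicsPipelineCreateInfo pipelineCreateInfo;
//   pipelineCreateInfo.setPNext( &compilerControl );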
  struct PipelineCoverageModulationStateCreateInfoNV
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone, VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ = {}, uint32_t coverageModulationTableCount_ = {}, const float* pCoverageModulationTable_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), coverageModulationMode( coverageModulationMode_ ), coverageModulationTableEnable( coverageModulationTableEnable_ ), coverageModulationTableCount( coverageModulationTableCount_ ), pCoverageModulationTable( pCoverageModulationTable_ )
    {}
    VULKAN_HPP_CONSTEXPR PipelineCoverageModulationStateCreateInfoNV( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineCoverageModulationStateCreateInfoNV( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCoverageModulationStateCreateInfoNV( *reinterpret_cast<PipelineCoverageModulationStateCreateInfoNV const *>( &rhs ) )
    {}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    PipelineCoverageModulationStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_, VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_, VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_ )
      : flags( flags_ ), coverageModulationMode( coverageModulationMode_ ), coverageModulationTableEnable( coverageModulationTableEnable_ ), coverageModulationTableCount( static_cast<uint32_t>( coverageModulationTable_.size() ) ), pCoverageModulationTable( coverageModulationTable_.data() )
    {}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageModulationStateCreateInfoNV & operator=( PipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineCoverageModulationStateCreateInfoNV & operator=( VkPipelineCoverageModulationStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV const *>( &rhs );
      return *this;
    }
    PipelineCoverageModulationStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    PipelineCoverageModulationStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
    PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationMode( VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageModulationMode = coverageModulationMode_;
      return *this;
    }
    PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageModulationTableEnable = coverageModulationTableEnable_;
      return *this;
    }
    PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTableCount( uint32_t coverageModulationTableCount_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageModulationTableCount = coverageModulationTableCount_;
      return *this;
    }
    PipelineCoverageModulationStateCreateInfoNV & setPCoverageModulationTable( const float* pCoverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
    {
      pCoverageModulationTable = pCoverageModulationTable_;
      return *this;
    }
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    PipelineCoverageModulationStateCreateInfoNV & setCoverageModulationTable( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const float> const & coverageModulationTable_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageModulationTableCount = static_cast<uint32_t>( coverageModulationTable_.size() );
      pCoverageModulationTable = coverageModulationTable_.data();
      return *this;
    }
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    operator VkPipelineCoverageModulationStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCoverageModulationStateCreateInfoNV*>( this );
    }
    operator VkPipelineCoverageModulationStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCoverageModulationStateCreateInfoNV*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PipelineCoverageModulationStateCreateInfoNV const& ) const = default;
#else
    bool operator==( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( coverageModulationMode == rhs.coverageModulationMode )
          && ( coverageModulationTableEnable == rhs.coverageModulationTableEnable )
          && ( coverageModulationTableCount == rhs.coverageModulationTableCount )
          && ( pCoverageModulationTable == rhs.pCoverageModulationTable );
    }
    bool operator!=( PipelineCoverageModulationStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageModulationStateCreateInfoNV;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV flags = {};
    VULKAN_HPP_NAMESPACE::CoverageModulationModeNV coverageModulationMode = VULKAN_HPP_NAMESPACE::CoverageModulationModeNV::eNone;
    VULKAN_HPP_NAMESPACE::Bool32 coverageModulationTableEnable = {};
    uint32_t coverageModulationTableCount = {};
    const float* pCoverageModulationTable = {};
  };
  static_assert( sizeof( PipelineCoverageModulationStateCreateInfoNV ) == sizeof( VkPipelineCoverageModulationStateCreateInfoNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineCoverageModulationStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePipelineCoverageModulationStateCreateInfoNV>
  {
    using Type = PipelineCoverageModulationStateCreateInfoNV;
  };
  struct PipelineCoverageReductionStateCreateInfoNV
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), coverageReductionMode( coverageReductionMode_ )
    {}
    VULKAN_HPP_CONSTEXPR PipelineCoverageReductionStateCreateInfoNV( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineCoverageReductionStateCreateInfoNV( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCoverageReductionStateCreateInfoNV( *reinterpret_cast<PipelineCoverageReductionStateCreateInfoNV const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageReductionStateCreateInfoNV & operator=( PipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineCoverageReductionStateCreateInfoNV & operator=( VkPipelineCoverageReductionStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV const *>( &rhs );
      return *this;
    }
    PipelineCoverageReductionStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    PipelineCoverageReductionStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
    PipelineCoverageReductionStateCreateInfoNV & setCoverageReductionMode( VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageReductionMode = coverageReductionMode_;
      return *this;
    }
    operator VkPipelineCoverageReductionStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCoverageReductionStateCreateInfoNV*>( this );
    }
    operator VkPipelineCoverageReductionStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCoverageReductionStateCreateInfoNV*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PipelineCoverageReductionStateCreateInfoNV const& ) const = default;
#else
    bool operator==( PipelineCoverageReductionStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( coverageReductionMode == rhs.coverageReductionMode );
    }
    bool operator!=( PipelineCoverageReductionStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageReductionStateCreateInfoNV;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV flags = {};
    VULKAN_HPP_NAMESPACE::CoverageReductionModeNV coverageReductionMode = VULKAN_HPP_NAMESPACE::CoverageReductionModeNV::eMerge;
  };
  static_assert( sizeof( PipelineCoverageReductionStateCreateInfoNV ) == sizeof( VkPipelineCoverageReductionStateCreateInfoNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineCoverageReductionStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePipelineCoverageReductionStateCreateInfoNV>
  {
    using Type = PipelineCoverageReductionStateCreateInfoNV;
  };
  struct PipelineCoverageToColorStateCreateInfoNV
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ = {}, uint32_t coverageToColorLocation_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), coverageToColorEnable( coverageToColorEnable_ ), coverageToColorLocation( coverageToColorLocation_ )
    {}
    VULKAN_HPP_CONSTEXPR PipelineCoverageToColorStateCreateInfoNV( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineCoverageToColorStateCreateInfoNV( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCoverageToColorStateCreateInfoNV( *reinterpret_cast<PipelineCoverageToColorStateCreateInfoNV const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineCoverageToColorStateCreateInfoNV & operator=( PipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineCoverageToColorStateCreateInfoNV & operator=( VkPipelineCoverageToColorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV const *>( &rhs );
      return *this;
    }
    PipelineCoverageToColorStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    PipelineCoverageToColorStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
    PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorEnable( VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageToColorEnable = coverageToColorEnable_;
      return *this;
    }
    PipelineCoverageToColorStateCreateInfoNV & setCoverageToColorLocation( uint32_t coverageToColorLocation_ ) VULKAN_HPP_NOEXCEPT
    {
      coverageToColorLocation = coverageToColorLocation_;
      return *this;
    }
    operator VkPipelineCoverageToColorStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCoverageToColorStateCreateInfoNV*>( this );
    }
    operator VkPipelineCoverageToColorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCoverageToColorStateCreateInfoNV*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PipelineCoverageToColorStateCreateInfoNV const& ) const = default;
#else
    bool operator==( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( coverageToColorEnable == rhs.coverageToColorEnable )
          && ( coverageToColorLocation == rhs.coverageToColorLocation );
    }
    bool operator!=( PipelineCoverageToColorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCoverageToColorStateCreateInfoNV;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV flags = {};
    VULKAN_HPP_NAMESPACE::Bool32 coverageToColorEnable = {};
    uint32_t coverageToColorLocation = {};
  };
  static_assert( sizeof( PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineCoverageToColorStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePipelineCoverageToColorStateCreateInfoNV>
  {
    using Type = PipelineCoverageToColorStateCreateInfoNV;
  };
  struct PipelineCreationFeedbackEXT
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackEXT(VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags_ = {}, uint64_t duration_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), duration( duration_ )
    {}
    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackEXT( PipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineCreationFeedbackEXT( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCreationFeedbackEXT( *reinterpret_cast<PipelineCreationFeedbackEXT const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackEXT & operator=( PipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineCreationFeedbackEXT & operator=( VkPipelineCreationFeedbackEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT const *>( &rhs );
      return *this;
    }
    operator VkPipelineCreationFeedbackEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCreationFeedbackEXT*>( this );
    }
    operator VkPipelineCreationFeedbackEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCreationFeedbackEXT*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PipelineCreationFeedbackEXT const& ) const = default;
#else
    bool operator==( PipelineCreationFeedbackEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( flags == rhs.flags )
          && ( duration == rhs.duration );
    }
    bool operator!=( PipelineCreationFeedbackEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagsEXT flags = {};
    uint64_t duration = {};
  };
  static_assert( sizeof( PipelineCreationFeedbackEXT ) == sizeof( VkPipelineCreationFeedbackEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineCreationFeedbackEXT>::value, "struct wrapper is not a standard layout!" );
  struct PipelineCreationFeedbackCreateInfoEXT
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineCreationFeedbackCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ = {}, uint32_t pipelineStageCreationFeedbackCount_ = {}, VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ = {}) VULKAN_HPP_NOEXCEPT
      : pPipelineCreationFeedback( pPipelineCreationFeedback_ ), pipelineStageCreationFeedbackCount( pipelineStageCreationFeedbackCount_ ), pPipelineStageCreationFeedbacks( pPipelineStageCreationFeedbacks_ )
    {}
    VULKAN_HPP_CONSTEXPR PipelineCreationFeedbackCreateInfoEXT( PipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineCreationFeedbackCreateInfoEXT( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineCreationFeedbackCreateInfoEXT( *reinterpret_cast<PipelineCreationFeedbackCreateInfoEXT const *>( &rhs ) )
    {}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    PipelineCreationFeedbackCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT> const & pipelineStageCreationFeedbacks_ )
      : pPipelineCreationFeedback( pPipelineCreationFeedback_ ), pipelineStageCreationFeedbackCount( static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() ) ), pPipelineStageCreationFeedbacks( pipelineStageCreationFeedbacks_.data() )
    {}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineCreationFeedbackCreateInfoEXT & operator=( PipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineCreationFeedbackCreateInfoEXT & operator=( VkPipelineCreationFeedbackCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT const *>( &rhs );
      return *this;
    }
    PipelineCreationFeedbackCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    PipelineCreationFeedbackCreateInfoEXT & setPPipelineCreationFeedback( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback_ ) VULKAN_HPP_NOEXCEPT
    {
      pPipelineCreationFeedback = pPipelineCreationFeedback_;
      return *this;
    }
    PipelineCreationFeedbackCreateInfoEXT & setPipelineStageCreationFeedbackCount( uint32_t pipelineStageCreationFeedbackCount_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineStageCreationFeedbackCount = pipelineStageCreationFeedbackCount_;
      return *this;
    }
    PipelineCreationFeedbackCreateInfoEXT & setPPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
    {
      pPipelineStageCreationFeedbacks = pPipelineStageCreationFeedbacks_;
      return *this;
    }
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    PipelineCreationFeedbackCreateInfoEXT & setPipelineStageCreationFeedbacks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT> const & pipelineStageCreationFeedbacks_ ) VULKAN_HPP_NOEXCEPT
    {
      pipelineStageCreationFeedbackCount = static_cast<uint32_t>( pipelineStageCreationFeedbacks_.size() );
      pPipelineStageCreationFeedbacks = pipelineStageCreationFeedbacks_.data();
      return *this;
    }
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    operator VkPipelineCreationFeedbackCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineCreationFeedbackCreateInfoEXT*>( this );
    }
    operator VkPipelineCreationFeedbackCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineCreationFeedbackCreateInfoEXT*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PipelineCreationFeedbackCreateInfoEXT const& ) const = default;
#else
    bool operator==( PipelineCreationFeedbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( pPipelineCreationFeedback == rhs.pPipelineCreationFeedback )
          && ( pipelineStageCreationFeedbackCount == rhs.pipelineStageCreationFeedbackCount )
          && ( pPipelineStageCreationFeedbacks == rhs.pPipelineStageCreationFeedbacks );
    }
    bool operator!=( PipelineCreationFeedbackCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineCreationFeedbackCreateInfoEXT;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineCreationFeedback = {};
    uint32_t pipelineStageCreationFeedbackCount = {};
    VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT* pPipelineStageCreationFeedbacks = {};
  };
  static_assert( sizeof( PipelineCreationFeedbackCreateInfoEXT ) == sizeof( VkPipelineCreationFeedbackCreateInfoEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineCreationFeedbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePipelineCreationFeedbackCreateInfoEXT>
  {
    using Type = PipelineCreationFeedbackCreateInfoEXT;
  };
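  // Illustrative usage sketch (not part of the generated header): one way to request pipeline
  // creation feedback is to chain a PipelineCreationFeedbackCreateInfoEXT into the pNext chain
  // of a pipeline create info. The variable names below are hypothetical, and the number of
  // stage feedback slots must match the pipeline's shader stage count.
  //
  //   vk::PipelineCreationFeedbackEXT creationFeedback;
  //   std::array<vk::PipelineCreationFeedbackEXT, 2> stageFeedbacks{};
  //   vk::PipelineCreationFeedbackCreateInfoEXT feedbackInfo;
  //   feedbackInfo.setPPipelineCreationFeedback( &creationFeedback )
  //               .setPipelineStageCreationFeedbacks( stageFeedbacks );
  //   vk::GraphicsPipelineCreateInfo pipelineCreateInfo;
  //   pipelineCreateInfo.setPNext( &feedbackInfo );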
  struct PipelineDiscardRectangleStateCreateInfoEXT
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive, uint32_t discardRectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), discardRectangleMode( discardRectangleMode_ ), discardRectangleCount( discardRectangleCount_ ), pDiscardRectangles( pDiscardRectangles_ )
    {}
    VULKAN_HPP_CONSTEXPR PipelineDiscardRectangleStateCreateInfoEXT( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineDiscardRectangleStateCreateInfoEXT( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineDiscardRectangleStateCreateInfoEXT( *reinterpret_cast<PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs ) )
    {}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    PipelineDiscardRectangleStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_, VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_ )
      : flags( flags_ ), discardRectangleMode( discardRectangleMode_ ), discardRectangleCount( static_cast<uint32_t>( discardRectangles_.size() ) ), pDiscardRectangles( discardRectangles_.data() )
    {}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineDiscardRectangleStateCreateInfoEXT & operator=( PipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineDiscardRectangleStateCreateInfoEXT & operator=( VkPipelineDiscardRectangleStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT const *>( &rhs );
      return *this;
    }
    PipelineDiscardRectangleStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    PipelineDiscardRectangleStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
    PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleMode( VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode_ ) VULKAN_HPP_NOEXCEPT
    {
      discardRectangleMode = discardRectangleMode_;
      return *this;
    }
    PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangleCount( uint32_t discardRectangleCount_ ) VULKAN_HPP_NOEXCEPT
    {
      discardRectangleCount = discardRectangleCount_;
      return *this;
    }
    PipelineDiscardRectangleStateCreateInfoEXT & setPDiscardRectangles( const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles_ ) VULKAN_HPP_NOEXCEPT
    {
      pDiscardRectangles = pDiscardRectangles_;
      return *this;
    }
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    PipelineDiscardRectangleStateCreateInfoEXT & setDiscardRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles_ ) VULKAN_HPP_NOEXCEPT
    {
      discardRectangleCount = static_cast<uint32_t>( discardRectangles_.size() );
      pDiscardRectangles = discardRectangles_.data();
      return *this;
    }
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    operator VkPipelineDiscardRectangleStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineDiscardRectangleStateCreateInfoEXT*>( this );
    }
    operator VkPipelineDiscardRectangleStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineDiscardRectangleStateCreateInfoEXT*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PipelineDiscardRectangleStateCreateInfoEXT const& ) const = default;
#else
    bool operator==( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( discardRectangleMode == rhs.discardRectangleMode )
          && ( discardRectangleCount == rhs.discardRectangleCount )
          && ( pDiscardRectangles == rhs.pDiscardRectangles );
    }
    bool operator!=( PipelineDiscardRectangleStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineDiscardRectangleStateCreateInfoEXT;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT flags = {};
    VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT discardRectangleMode = VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT::eInclusive;
    uint32_t discardRectangleCount = {};
    const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles = {};
  };
  static_assert( sizeof( PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineDiscardRectangleStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePipelineDiscardRectangleStateCreateInfoEXT>
  {
    using Type = PipelineDiscardRectangleStateCreateInfoEXT;
  };
  struct PipelineFragmentShadingRateEnumStateCreateInfoNV
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV(VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize, VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel, std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR,2> const& combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } }) VULKAN_HPP_NOEXCEPT
      : shadingRateType( shadingRateType_ ), shadingRate( shadingRate_ ), combinerOps( combinerOps_ )
    {}
    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineFragmentShadingRateEnumStateCreateInfoNV( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineFragmentShadingRateEnumStateCreateInfoNV( *reinterpret_cast<PipelineFragmentShadingRateEnumStateCreateInfoNV const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( PipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineFragmentShadingRateEnumStateCreateInfoNV & operator=( VkPipelineFragmentShadingRateEnumStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV const *>( &rhs );
      return *this;
    }
    PipelineFragmentShadingRateEnumStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    PipelineFragmentShadingRateEnumStateCreateInfoNV & setShadingRateType( VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRateType = shadingRateType_;
      return *this;
    }
    PipelineFragmentShadingRateEnumStateCreateInfoNV & setShadingRate( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate_ ) VULKAN_HPP_NOEXCEPT
    {
      shadingRate = shadingRate_;
      return *this;
    }
    PipelineFragmentShadingRateEnumStateCreateInfoNV & setCombinerOps( std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR,2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
    {
      combinerOps = combinerOps_;
      return *this;
    }
    operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineFragmentShadingRateEnumStateCreateInfoNV*>( this );
    }
    operator VkPipelineFragmentShadingRateEnumStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineFragmentShadingRateEnumStateCreateInfoNV*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PipelineFragmentShadingRateEnumStateCreateInfoNV const& ) const = default;
#else
    bool operator==( PipelineFragmentShadingRateEnumStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( shadingRateType == rhs.shadingRateType )
          && ( shadingRate == rhs.shadingRate )
          && ( combinerOps == rhs.combinerOps );
    }
    bool operator!=( PipelineFragmentShadingRateEnumStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV shadingRateType = VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV::eFragmentSize;
    VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate = VULKAN_HPP_NAMESPACE::FragmentShadingRateNV::e1InvocationPerPixel;
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps = {};
  };
  static_assert( sizeof( PipelineFragmentShadingRateEnumStateCreateInfoNV ) == sizeof( VkPipelineFragmentShadingRateEnumStateCreateInfoNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineFragmentShadingRateEnumStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePipelineFragmentShadingRateEnumStateCreateInfoNV>
  {
    using Type = PipelineFragmentShadingRateEnumStateCreateInfoNV;
  };
  struct PipelineFragmentShadingRateStateCreateInfoKHR
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR(VULKAN_HPP_NAMESPACE::Extent2D fragmentSize_ = {}, std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR,2> const& combinerOps_ = { { VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep, VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR::eKeep } }) VULKAN_HPP_NOEXCEPT
      : fragmentSize( fragmentSize_ ), combinerOps( combinerOps_ )
    {}
    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineFragmentShadingRateStateCreateInfoKHR( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineFragmentShadingRateStateCreateInfoKHR( *reinterpret_cast<PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineFragmentShadingRateStateCreateInfoKHR & operator=( PipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineFragmentShadingRateStateCreateInfoKHR & operator=( VkPipelineFragmentShadingRateStateCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR const *>( &rhs );
      return *this;
    }
    PipelineFragmentShadingRateStateCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    PipelineFragmentShadingRateStateCreateInfoKHR & setFragmentSize( VULKAN_HPP_NAMESPACE::Extent2D const & fragmentSize_ ) VULKAN_HPP_NOEXCEPT
    {
      fragmentSize = fragmentSize_;
      return *this;
    }
    PipelineFragmentShadingRateStateCreateInfoKHR & setCombinerOps( std::array<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR,2> combinerOps_ ) VULKAN_HPP_NOEXCEPT
    {
      combinerOps = combinerOps_;
      return *this;
    }
    operator VkPipelineFragmentShadingRateStateCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineFragmentShadingRateStateCreateInfoKHR*>( this );
    }
    operator VkPipelineFragmentShadingRateStateCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineFragmentShadingRateStateCreateInfoKHR*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PipelineFragmentShadingRateStateCreateInfoKHR const& ) const = default;
#else
    bool operator==( PipelineFragmentShadingRateStateCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( fragmentSize == rhs.fragmentSize )
          && ( combinerOps == rhs.combinerOps );
    }
    bool operator!=( PipelineFragmentShadingRateStateCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::Extent2D fragmentSize = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR, 2> combinerOps = {};
  };
  static_assert( sizeof( PipelineFragmentShadingRateStateCreateInfoKHR ) == sizeof( VkPipelineFragmentShadingRateStateCreateInfoKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineFragmentShadingRateStateCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePipelineFragmentShadingRateStateCreateInfoKHR>
  {
    using Type = PipelineFragmentShadingRateStateCreateInfoKHR;
  };
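  // Illustrative usage sketch (not part of the generated header): assuming the fragment shading
  // rate feature is enabled, a pipeline could request a 2x2 fragment size with "keep" combiner
  // ops roughly as follows (the variable name is hypothetical).
  //
  //   vk::PipelineFragmentShadingRateStateCreateInfoKHR shadingRateInfo;
  //   shadingRateInfo.setFragmentSize( { 2, 2 } )
  //                  .setCombinerOps( { vk::FragmentShadingRateCombinerOpKHR::eKeep,
  //                                     vk::FragmentShadingRateCombinerOpKHR::eKeep } );
  //   // Chain &shadingRateInfo into GraphicsPipelineCreateInfo::pNext when creating the pipeline.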
  struct PipelineRasterizationConservativeStateCreateInfoEXT
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled, float extraPrimitiveOverestimationSize_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), conservativeRasterizationMode( conservativeRasterizationMode_ ), extraPrimitiveOverestimationSize( extraPrimitiveOverestimationSize_ )
    {}
    VULKAN_HPP_CONSTEXPR PipelineRasterizationConservativeStateCreateInfoEXT( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineRasterizationConservativeStateCreateInfoEXT( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationConservativeStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationConservativeStateCreateInfoEXT & operator=( PipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineRasterizationConservativeStateCreateInfoEXT & operator=( VkPipelineRasterizationConservativeStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT const *>( &rhs );
      return *this;
    }
    PipelineRasterizationConservativeStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    PipelineRasterizationConservativeStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
    PipelineRasterizationConservativeStateCreateInfoEXT & setConservativeRasterizationMode( VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
    {
      conservativeRasterizationMode = conservativeRasterizationMode_;
      return *this;
    }
    PipelineRasterizationConservativeStateCreateInfoEXT & setExtraPrimitiveOverestimationSize( float extraPrimitiveOverestimationSize_ ) VULKAN_HPP_NOEXCEPT
    {
      extraPrimitiveOverestimationSize = extraPrimitiveOverestimationSize_;
      return *this;
    }
    operator VkPipelineRasterizationConservativeStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
    }
    operator VkPipelineRasterizationConservativeStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationConservativeStateCreateInfoEXT*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PipelineRasterizationConservativeStateCreateInfoEXT const& ) const = default;
#else
    bool operator==( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( conservativeRasterizationMode == rhs.conservativeRasterizationMode )
          && ( extraPrimitiveOverestimationSize == rhs.extraPrimitiveOverestimationSize );
    }
    bool operator!=( PipelineRasterizationConservativeStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT flags = {};
    VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT conservativeRasterizationMode = VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT::eDisabled;
    float extraPrimitiveOverestimationSize = {};
  };
  static_assert( sizeof( PipelineRasterizationConservativeStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineRasterizationConservativeStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationConservativeStateCreateInfoEXT>
  {
    using Type = PipelineRasterizationConservativeStateCreateInfoEXT;
  };
  struct PipelineRasterizationDepthClipStateCreateInfoEXT
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ = {}, VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), depthClipEnable( depthClipEnable_ )
    {}
    VULKAN_HPP_CONSTEXPR PipelineRasterizationDepthClipStateCreateInfoEXT( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineRasterizationDepthClipStateCreateInfoEXT( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationDepthClipStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationDepthClipStateCreateInfoEXT const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( PipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineRasterizationDepthClipStateCreateInfoEXT & operator=( VkPipelineRasterizationDepthClipStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT const *>( &rhs );
      return *this;
    }
    PipelineRasterizationDepthClipStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    PipelineRasterizationDepthClipStateCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
    PipelineRasterizationDepthClipStateCreateInfoEXT & setDepthClipEnable( VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      depthClipEnable = depthClipEnable_;
      return *this;
    }
    operator VkPipelineRasterizationDepthClipStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationDepthClipStateCreateInfoEXT*>( this );
    }
    operator VkPipelineRasterizationDepthClipStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationDepthClipStateCreateInfoEXT*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PipelineRasterizationDepthClipStateCreateInfoEXT const& ) const = default;
#else
    bool operator==( PipelineRasterizationDepthClipStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( depthClipEnable == rhs.depthClipEnable );
    }
    bool operator!=( PipelineRasterizationDepthClipStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT flags = {};
    VULKAN_HPP_NAMESPACE::Bool32 depthClipEnable = {};
  };
  static_assert( sizeof( PipelineRasterizationDepthClipStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineRasterizationDepthClipStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationDepthClipStateCreateInfoEXT>
  {
    using Type = PipelineRasterizationDepthClipStateCreateInfoEXT;
  };
  struct PipelineRasterizationLineStateCreateInfoEXT
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault, VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ = {}, uint32_t lineStippleFactor_ = {}, uint16_t lineStipplePattern_ = {}) VULKAN_HPP_NOEXCEPT
      : lineRasterizationMode( lineRasterizationMode_ ), stippledLineEnable( stippledLineEnable_ ), lineStippleFactor( lineStippleFactor_ ), lineStipplePattern( lineStipplePattern_ )
    {}
    VULKAN_HPP_CONSTEXPR PipelineRasterizationLineStateCreateInfoEXT( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineRasterizationLineStateCreateInfoEXT( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : PipelineRasterizationLineStateCreateInfoEXT( *reinterpret_cast<PipelineRasterizationLineStateCreateInfoEXT const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationLineStateCreateInfoEXT & operator=( PipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    PipelineRasterizationLineStateCreateInfoEXT & operator=( VkPipelineRasterizationLineStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT const *>( &rhs );
      return *this;
    }
    PipelineRasterizationLineStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    PipelineRasterizationLineStateCreateInfoEXT & setLineRasterizationMode( VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode_ ) VULKAN_HPP_NOEXCEPT
    {
      lineRasterizationMode = lineRasterizationMode_;
      return *this;
    }
    PipelineRasterizationLineStateCreateInfoEXT & setStippledLineEnable( VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable_ ) VULKAN_HPP_NOEXCEPT
    {
      stippledLineEnable = stippledLineEnable_;
      return *this;
    }
    PipelineRasterizationLineStateCreateInfoEXT & setLineStippleFactor( uint32_t lineStippleFactor_ ) VULKAN_HPP_NOEXCEPT
    {
      lineStippleFactor = lineStippleFactor_;
      return *this;
    }
    PipelineRasterizationLineStateCreateInfoEXT & setLineStipplePattern( uint16_t lineStipplePattern_ ) VULKAN_HPP_NOEXCEPT
    {
      lineStipplePattern = lineStipplePattern_;
      return *this;
    }
    operator VkPipelineRasterizationLineStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkPipelineRasterizationLineStateCreateInfoEXT*>( this );
    }
    operator VkPipelineRasterizationLineStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkPipelineRasterizationLineStateCreateInfoEXT*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( PipelineRasterizationLineStateCreateInfoEXT const& ) const = default;
#else
    bool operator==( PipelineRasterizationLineStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( lineRasterizationMode == rhs.lineRasterizationMode )
          && ( stippledLineEnable == rhs.stippledLineEnable )
          && ( lineStippleFactor == rhs.lineStippleFactor )
          && ( lineStipplePattern == rhs.lineStipplePattern );
    }
    bool operator!=( PipelineRasterizationLineStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationLineStateCreateInfoEXT;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT lineRasterizationMode = VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT::eDefault;
    VULKAN_HPP_NAMESPACE::Bool32 stippledLineEnable = {};
    uint32_t lineStippleFactor = {};
    uint16_t lineStipplePattern = {};
  };
  static_assert( sizeof( PipelineRasterizationLineStateCreateInfoEXT ) == sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<PipelineRasterizationLineStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::ePipelineRasterizationLineStateCreateInfoEXT>
  {
    using Type = PipelineRasterizationLineStateCreateInfoEXT;
  };
  58677. struct PipelineRasterizationStateRasterizationOrderAMD
  58678. {
  58679. static const bool allowDuplicate = false;
  58680. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
  58681. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  58682. VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD(VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict) VULKAN_HPP_NOEXCEPT
  58683. : rasterizationOrder( rasterizationOrder_ )
  58684. {}
  58685. VULKAN_HPP_CONSTEXPR PipelineRasterizationStateRasterizationOrderAMD( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  58686. PipelineRasterizationStateRasterizationOrderAMD( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
  58687. : PipelineRasterizationStateRasterizationOrderAMD( *reinterpret_cast<PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs ) )
  58688. {}
  58689. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  58690. VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateRasterizationOrderAMD & operator=( PipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  58691. PipelineRasterizationStateRasterizationOrderAMD & operator=( VkPipelineRasterizationStateRasterizationOrderAMD const & rhs ) VULKAN_HPP_NOEXCEPT
  58692. {
  58693. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD const *>( &rhs );
  58694. return *this;
  58695. }
  58696. PipelineRasterizationStateRasterizationOrderAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  58697. {
  58698. pNext = pNext_;
  58699. return *this;
  58700. }
  58701. PipelineRasterizationStateRasterizationOrderAMD & setRasterizationOrder( VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder_ ) VULKAN_HPP_NOEXCEPT
  58702. {
  58703. rasterizationOrder = rasterizationOrder_;
  58704. return *this;
  58705. }
  58706. operator VkPipelineRasterizationStateRasterizationOrderAMD const&() const VULKAN_HPP_NOEXCEPT
  58707. {
  58708. return *reinterpret_cast<const VkPipelineRasterizationStateRasterizationOrderAMD*>( this );
  58709. }
  58710. operator VkPipelineRasterizationStateRasterizationOrderAMD &() VULKAN_HPP_NOEXCEPT
  58711. {
  58712. return *reinterpret_cast<VkPipelineRasterizationStateRasterizationOrderAMD*>( this );
  58713. }
  58714. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  58715. auto operator<=>( PipelineRasterizationStateRasterizationOrderAMD const& ) const = default;
  58716. #else
  58717. bool operator==( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
  58718. {
  58719. return ( sType == rhs.sType )
  58720. && ( pNext == rhs.pNext )
  58721. && ( rasterizationOrder == rhs.rasterizationOrder );
  58722. }
  58723. bool operator!=( PipelineRasterizationStateRasterizationOrderAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
  58724. {
  58725. return !operator==( rhs );
  58726. }
  58727. #endif
  58728. public:
  58729. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateRasterizationOrderAMD;
  58730. const void* pNext = {};
  58731. VULKAN_HPP_NAMESPACE::RasterizationOrderAMD rasterizationOrder = VULKAN_HPP_NAMESPACE::RasterizationOrderAMD::eStrict;
  58732. };
  58733. static_assert( sizeof( PipelineRasterizationStateRasterizationOrderAMD ) == sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ), "struct and wrapper have different size!" );
  58734. static_assert( std::is_standard_layout<PipelineRasterizationStateRasterizationOrderAMD>::value, "struct wrapper is not a standard layout!" );
  58735. template <>
  58736. struct CppType<StructureType, StructureType::ePipelineRasterizationStateRasterizationOrderAMD>
  58737. {
  58738. using Type = PipelineRasterizationStateRasterizationOrderAMD;
  58739. };
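// Editorial note: illustrative usage sketch, not part of the generated header. Assuming the
// default VULKAN_HPP_NAMESPACE (vk) and the VK_AMD_rasterization_order extension; the struct is
// chained into a hypothetical vk::PipelineRasterizationStateCreateInfo named rasterizationState:
//
//   auto rasterOrder = vk::PipelineRasterizationStateRasterizationOrderAMD{}
//                        .setRasterizationOrder( vk::RasterizationOrderAMD::eRelaxed );
//   rasterizationState.pNext = &rasterOrder;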
  58740. struct PipelineRasterizationStateStreamCreateInfoEXT
  58741. {
  58742. static const bool allowDuplicate = false;
  58743. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
  58744. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  58745. VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT(VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ = {}, uint32_t rasterizationStream_ = {}) VULKAN_HPP_NOEXCEPT
  58746. : flags( flags_ ), rasterizationStream( rasterizationStream_ )
  58747. {}
  58748. VULKAN_HPP_CONSTEXPR PipelineRasterizationStateStreamCreateInfoEXT( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  58749. PipelineRasterizationStateStreamCreateInfoEXT( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  58750. : PipelineRasterizationStateStreamCreateInfoEXT( *reinterpret_cast<PipelineRasterizationStateStreamCreateInfoEXT const *>( &rhs ) )
  58751. {}
  58752. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  58753. VULKAN_HPP_CONSTEXPR_14 PipelineRasterizationStateStreamCreateInfoEXT & operator=( PipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  58754. PipelineRasterizationStateStreamCreateInfoEXT & operator=( VkPipelineRasterizationStateStreamCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  58755. {
  58756. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT const *>( &rhs );
  58757. return *this;
  58758. }
  58759. PipelineRasterizationStateStreamCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  58760. {
  58761. pNext = pNext_;
  58762. return *this;
  58763. }
  58764. PipelineRasterizationStateStreamCreateInfoEXT & setFlags( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags_ ) VULKAN_HPP_NOEXCEPT
  58765. {
  58766. flags = flags_;
  58767. return *this;
  58768. }
  58769. PipelineRasterizationStateStreamCreateInfoEXT & setRasterizationStream( uint32_t rasterizationStream_ ) VULKAN_HPP_NOEXCEPT
  58770. {
  58771. rasterizationStream = rasterizationStream_;
  58772. return *this;
  58773. }
  58774. operator VkPipelineRasterizationStateStreamCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  58775. {
  58776. return *reinterpret_cast<const VkPipelineRasterizationStateStreamCreateInfoEXT*>( this );
  58777. }
  58778. operator VkPipelineRasterizationStateStreamCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  58779. {
  58780. return *reinterpret_cast<VkPipelineRasterizationStateStreamCreateInfoEXT*>( this );
  58781. }
  58782. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  58783. auto operator<=>( PipelineRasterizationStateStreamCreateInfoEXT const& ) const = default;
  58784. #else
  58785. bool operator==( PipelineRasterizationStateStreamCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  58786. {
  58787. return ( sType == rhs.sType )
  58788. && ( pNext == rhs.pNext )
  58789. && ( flags == rhs.flags )
  58790. && ( rasterizationStream == rhs.rasterizationStream );
  58791. }
  58792. bool operator!=( PipelineRasterizationStateStreamCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  58793. {
  58794. return !operator==( rhs );
  58795. }
  58796. #endif
  58797. public:
  58798. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRasterizationStateStreamCreateInfoEXT;
  58799. const void* pNext = {};
  58800. VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT flags = {};
  58801. uint32_t rasterizationStream = {};
  58802. };
  58803. static_assert( sizeof( PipelineRasterizationStateStreamCreateInfoEXT ) == sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ), "struct and wrapper have different size!" );
  58804. static_assert( std::is_standard_layout<PipelineRasterizationStateStreamCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  58805. template <>
  58806. struct CppType<StructureType, StructureType::ePipelineRasterizationStateStreamCreateInfoEXT>
  58807. {
  58808. using Type = PipelineRasterizationStateStreamCreateInfoEXT;
  58809. };
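// Editorial note: illustrative usage sketch, not part of the generated header. Assuming the
// default VULKAN_HPP_NAMESPACE (vk) and VK_EXT_transform_feedback; this selects the vertex
// stream that rasterization consumes, chained into a hypothetical rasterizationState:
//
//   auto streamInfo = vk::PipelineRasterizationStateStreamCreateInfoEXT{}
//                       .setRasterizationStream( 0 );
//   rasterizationState.pNext = &streamInfo;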
  58810. struct PipelineRepresentativeFragmentTestStateCreateInfoNV
  58811. {
  58812. static const bool allowDuplicate = false;
  58813. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
  58814. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  58815. VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ = {}) VULKAN_HPP_NOEXCEPT
  58816. : representativeFragmentTestEnable( representativeFragmentTestEnable_ )
  58817. {}
  58818. VULKAN_HPP_CONSTEXPR PipelineRepresentativeFragmentTestStateCreateInfoNV( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  58819. PipelineRepresentativeFragmentTestStateCreateInfoNV( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  58820. : PipelineRepresentativeFragmentTestStateCreateInfoNV( *reinterpret_cast<PipelineRepresentativeFragmentTestStateCreateInfoNV const *>( &rhs ) )
  58821. {}
  58822. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  58823. VULKAN_HPP_CONSTEXPR_14 PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( PipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  58824. PipelineRepresentativeFragmentTestStateCreateInfoNV & operator=( VkPipelineRepresentativeFragmentTestStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  58825. {
  58826. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV const *>( &rhs );
  58827. return *this;
  58828. }
  58829. PipelineRepresentativeFragmentTestStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  58830. {
  58831. pNext = pNext_;
  58832. return *this;
  58833. }
  58834. PipelineRepresentativeFragmentTestStateCreateInfoNV & setRepresentativeFragmentTestEnable( VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable_ ) VULKAN_HPP_NOEXCEPT
  58835. {
  58836. representativeFragmentTestEnable = representativeFragmentTestEnable_;
  58837. return *this;
  58838. }
  58839. operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
  58840. {
  58841. return *reinterpret_cast<const VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>( this );
  58842. }
  58843. operator VkPipelineRepresentativeFragmentTestStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
  58844. {
  58845. return *reinterpret_cast<VkPipelineRepresentativeFragmentTestStateCreateInfoNV*>( this );
  58846. }
  58847. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  58848. auto operator<=>( PipelineRepresentativeFragmentTestStateCreateInfoNV const& ) const = default;
  58849. #else
  58850. bool operator==( PipelineRepresentativeFragmentTestStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  58851. {
  58852. return ( sType == rhs.sType )
  58853. && ( pNext == rhs.pNext )
  58854. && ( representativeFragmentTestEnable == rhs.representativeFragmentTestEnable );
  58855. }
  58856. bool operator!=( PipelineRepresentativeFragmentTestStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  58857. {
  58858. return !operator==( rhs );
  58859. }
  58860. #endif
  58861. public:
  58862. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV;
  58863. const void* pNext = {};
  58864. VULKAN_HPP_NAMESPACE::Bool32 representativeFragmentTestEnable = {};
  58865. };
  58866. static_assert( sizeof( PipelineRepresentativeFragmentTestStateCreateInfoNV ) == sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ), "struct and wrapper have different size!" );
  58867. static_assert( std::is_standard_layout<PipelineRepresentativeFragmentTestStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  58868. template <>
  58869. struct CppType<StructureType, StructureType::ePipelineRepresentativeFragmentTestStateCreateInfoNV>
  58870. {
  58871. using Type = PipelineRepresentativeFragmentTestStateCreateInfoNV;
  58872. };
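// Editorial note: illustrative usage sketch, not part of the generated header. Assuming the
// default VULKAN_HPP_NAMESPACE (vk) and VK_NV_representative_fragment_test; chained into a
// hypothetical vk::GraphicsPipelineCreateInfo named pipelineCreateInfo:
//
//   auto reprTest = vk::PipelineRepresentativeFragmentTestStateCreateInfoNV{}
//                     .setRepresentativeFragmentTestEnable( VK_TRUE );
//   pipelineCreateInfo.pNext = &reprTest;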
  58873. struct PipelineSampleLocationsStateCreateInfoEXT
  58874. {
  58875. static const bool allowDuplicate = false;
  58876. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
  58877. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  58878. VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT(VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT
  58879. : sampleLocationsEnable( sampleLocationsEnable_ ), sampleLocationsInfo( sampleLocationsInfo_ )
  58880. {}
  58881. VULKAN_HPP_CONSTEXPR PipelineSampleLocationsStateCreateInfoEXT( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  58882. PipelineSampleLocationsStateCreateInfoEXT( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  58883. : PipelineSampleLocationsStateCreateInfoEXT( *reinterpret_cast<PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs ) )
  58884. {}
  58885. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  58886. VULKAN_HPP_CONSTEXPR_14 PipelineSampleLocationsStateCreateInfoEXT & operator=( PipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  58887. PipelineSampleLocationsStateCreateInfoEXT & operator=( VkPipelineSampleLocationsStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  58888. {
  58889. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT const *>( &rhs );
  58890. return *this;
  58891. }
  58892. PipelineSampleLocationsStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  58893. {
  58894. pNext = pNext_;
  58895. return *this;
  58896. }
  58897. PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsEnable( VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable_ ) VULKAN_HPP_NOEXCEPT
  58898. {
  58899. sampleLocationsEnable = sampleLocationsEnable_;
  58900. return *this;
  58901. }
  58902. PipelineSampleLocationsStateCreateInfoEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
  58903. {
  58904. sampleLocationsInfo = sampleLocationsInfo_;
  58905. return *this;
  58906. }
  58907. operator VkPipelineSampleLocationsStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  58908. {
  58909. return *reinterpret_cast<const VkPipelineSampleLocationsStateCreateInfoEXT*>( this );
  58910. }
  58911. operator VkPipelineSampleLocationsStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  58912. {
  58913. return *reinterpret_cast<VkPipelineSampleLocationsStateCreateInfoEXT*>( this );
  58914. }
  58915. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  58916. auto operator<=>( PipelineSampleLocationsStateCreateInfoEXT const& ) const = default;
  58917. #else
  58918. bool operator==( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  58919. {
  58920. return ( sType == rhs.sType )
  58921. && ( pNext == rhs.pNext )
  58922. && ( sampleLocationsEnable == rhs.sampleLocationsEnable )
  58923. && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
  58924. }
  58925. bool operator!=( PipelineSampleLocationsStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  58926. {
  58927. return !operator==( rhs );
  58928. }
  58929. #endif
  58930. public:
  58931. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineSampleLocationsStateCreateInfoEXT;
  58932. const void* pNext = {};
  58933. VULKAN_HPP_NAMESPACE::Bool32 sampleLocationsEnable = {};
  58934. VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
  58935. };
  58936. static_assert( sizeof( PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ), "struct and wrapper have different size!" );
  58937. static_assert( std::is_standard_layout<PipelineSampleLocationsStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  58938. template <>
  58939. struct CppType<StructureType, StructureType::ePipelineSampleLocationsStateCreateInfoEXT>
  58940. {
  58941. using Type = PipelineSampleLocationsStateCreateInfoEXT;
  58942. };
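// Editorial note: illustrative usage sketch, not part of the generated header. Assuming the
// default VULKAN_HPP_NAMESPACE (vk), VK_EXT_sample_locations, a previously filled
// vk::SampleLocationsInfoEXT named sampleLocationsInfo (hypothetical), and a hypothetical
// vk::PipelineMultisampleStateCreateInfo named multisampleState:
//
//   auto sampleLocState = vk::PipelineSampleLocationsStateCreateInfoEXT{}
//                           .setSampleLocationsEnable( VK_TRUE )
//                           .setSampleLocationsInfo( sampleLocationsInfo );
//   multisampleState.pNext = &sampleLocState;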
  58943. struct PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT
  58944. {
  58945. static const bool allowDuplicate = false;
  58946. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
  58947. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  58948. VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT(uint32_t requiredSubgroupSize_ = {}) VULKAN_HPP_NOEXCEPT
  58949. : requiredSubgroupSize( requiredSubgroupSize_ )
  58950. {}
  58951. VULKAN_HPP_CONSTEXPR PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  58952. PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  58953. : PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT( *reinterpret_cast<PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const *>( &rhs ) )
  58954. {}
  58955. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  58956. VULKAN_HPP_CONSTEXPR_14 PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & operator=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  58957. PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT & operator=( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  58958. {
  58959. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const *>( &rhs );
  58960. return *this;
  58961. }
  58962. operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  58963. {
  58964. return *reinterpret_cast<const VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT*>( this );
  58965. }
  58966. operator VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  58967. {
  58968. return *reinterpret_cast<VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT*>( this );
  58969. }
  58970. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  58971. auto operator<=>( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& ) const = default;
  58972. #else
  58973. bool operator==( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  58974. {
  58975. return ( sType == rhs.sType )
  58976. && ( pNext == rhs.pNext )
  58977. && ( requiredSubgroupSize == rhs.requiredSubgroupSize );
  58978. }
  58979. bool operator!=( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  58980. {
  58981. return !operator==( rhs );
  58982. }
  58983. #endif
  58984. public:
  58985. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
  58986. void* pNext = {};
  58987. uint32_t requiredSubgroupSize = {};
  58988. };
  58989. static_assert( sizeof( PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ) == sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT ), "struct and wrapper have different size!" );
  58990. static_assert( std::is_standard_layout<PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  58991. template <>
  58992. struct CppType<StructureType, StructureType::ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT>
  58993. {
  58994. using Type = PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
  58995. };
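// Editorial note: illustrative usage sketch, not part of the generated header. This struct is
// generated without setters, so members are assigned directly. Assuming the default
// VULKAN_HPP_NAMESPACE (vk), VK_EXT_subgroup_size_control, and a hypothetical
// vk::PipelineShaderStageCreateInfo named stageCreateInfo:
//
//   vk::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT subgroupSizeInfo;
//   subgroupSizeInfo.requiredSubgroupSize = 32;   // must be a subgroup size the device supports
//   stageCreateInfo.pNext = &subgroupSizeInfo;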
  58996. struct PipelineTessellationDomainOriginStateCreateInfo
  58997. {
  58998. static const bool allowDuplicate = false;
  58999. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
  59000. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59001. VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo(VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft) VULKAN_HPP_NOEXCEPT
  59002. : domainOrigin( domainOrigin_ )
  59003. {}
  59004. VULKAN_HPP_CONSTEXPR PipelineTessellationDomainOriginStateCreateInfo( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59005. PipelineTessellationDomainOriginStateCreateInfo( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  59006. : PipelineTessellationDomainOriginStateCreateInfo( *reinterpret_cast<PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs ) )
  59007. {}
  59008. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59009. VULKAN_HPP_CONSTEXPR_14 PipelineTessellationDomainOriginStateCreateInfo & operator=( PipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59010. PipelineTessellationDomainOriginStateCreateInfo & operator=( VkPipelineTessellationDomainOriginStateCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  59011. {
  59012. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo const *>( &rhs );
  59013. return *this;
  59014. }
  59015. PipelineTessellationDomainOriginStateCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  59016. {
  59017. pNext = pNext_;
  59018. return *this;
  59019. }
  59020. PipelineTessellationDomainOriginStateCreateInfo & setDomainOrigin( VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin_ ) VULKAN_HPP_NOEXCEPT
  59021. {
  59022. domainOrigin = domainOrigin_;
  59023. return *this;
  59024. }
  59025. operator VkPipelineTessellationDomainOriginStateCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  59026. {
  59027. return *reinterpret_cast<const VkPipelineTessellationDomainOriginStateCreateInfo*>( this );
  59028. }
  59029. operator VkPipelineTessellationDomainOriginStateCreateInfo &() VULKAN_HPP_NOEXCEPT
  59030. {
  59031. return *reinterpret_cast<VkPipelineTessellationDomainOriginStateCreateInfo*>( this );
  59032. }
  59033. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  59034. auto operator<=>( PipelineTessellationDomainOriginStateCreateInfo const& ) const = default;
  59035. #else
  59036. bool operator==( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  59037. {
  59038. return ( sType == rhs.sType )
  59039. && ( pNext == rhs.pNext )
  59040. && ( domainOrigin == rhs.domainOrigin );
  59041. }
  59042. bool operator!=( PipelineTessellationDomainOriginStateCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  59043. {
  59044. return !operator==( rhs );
  59045. }
  59046. #endif
  59047. public:
  59048. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineTessellationDomainOriginStateCreateInfo;
  59049. const void* pNext = {};
  59050. VULKAN_HPP_NAMESPACE::TessellationDomainOrigin domainOrigin = VULKAN_HPP_NAMESPACE::TessellationDomainOrigin::eUpperLeft;
  59051. };
  59052. static_assert( sizeof( PipelineTessellationDomainOriginStateCreateInfo ) == sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ), "struct and wrapper have different size!" );
  59053. static_assert( std::is_standard_layout<PipelineTessellationDomainOriginStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
  59054. template <>
  59055. struct CppType<StructureType, StructureType::ePipelineTessellationDomainOriginStateCreateInfo>
  59056. {
  59057. using Type = PipelineTessellationDomainOriginStateCreateInfo;
  59058. };
  59059. using PipelineTessellationDomainOriginStateCreateInfoKHR = PipelineTessellationDomainOriginStateCreateInfo;
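// Editorial note: illustrative usage sketch, not part of the generated header. Assuming the
// default VULKAN_HPP_NAMESPACE (vk) and a hypothetical vk::PipelineTessellationStateCreateInfo
// named tessellationState:
//
//   auto domainOrigin = vk::PipelineTessellationDomainOriginStateCreateInfo{}
//                         .setDomainOrigin( vk::TessellationDomainOrigin::eLowerLeft );
//   tessellationState.pNext = &domainOrigin;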
  59060. struct VertexInputBindingDivisorDescriptionEXT
  59061. {
  59062. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59063. VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT(uint32_t binding_ = {}, uint32_t divisor_ = {}) VULKAN_HPP_NOEXCEPT
  59064. : binding( binding_ ), divisor( divisor_ )
  59065. {}
  59066. VULKAN_HPP_CONSTEXPR VertexInputBindingDivisorDescriptionEXT( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59067. VertexInputBindingDivisorDescriptionEXT( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  59068. : VertexInputBindingDivisorDescriptionEXT( *reinterpret_cast<VertexInputBindingDivisorDescriptionEXT const *>( &rhs ) )
  59069. {}
  59070. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59071. VULKAN_HPP_CONSTEXPR_14 VertexInputBindingDivisorDescriptionEXT & operator=( VertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59072. VertexInputBindingDivisorDescriptionEXT & operator=( VkVertexInputBindingDivisorDescriptionEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  59073. {
  59074. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT const *>( &rhs );
  59075. return *this;
  59076. }
  59077. VertexInputBindingDivisorDescriptionEXT & setBinding( uint32_t binding_ ) VULKAN_HPP_NOEXCEPT
  59078. {
  59079. binding = binding_;
  59080. return *this;
  59081. }
  59082. VertexInputBindingDivisorDescriptionEXT & setDivisor( uint32_t divisor_ ) VULKAN_HPP_NOEXCEPT
  59083. {
  59084. divisor = divisor_;
  59085. return *this;
  59086. }
  59087. operator VkVertexInputBindingDivisorDescriptionEXT const&() const VULKAN_HPP_NOEXCEPT
  59088. {
  59089. return *reinterpret_cast<const VkVertexInputBindingDivisorDescriptionEXT*>( this );
  59090. }
  59091. operator VkVertexInputBindingDivisorDescriptionEXT &() VULKAN_HPP_NOEXCEPT
  59092. {
  59093. return *reinterpret_cast<VkVertexInputBindingDivisorDescriptionEXT*>( this );
  59094. }
  59095. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  59096. auto operator<=>( VertexInputBindingDivisorDescriptionEXT const& ) const = default;
  59097. #else
  59098. bool operator==( VertexInputBindingDivisorDescriptionEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  59099. {
  59100. return ( binding == rhs.binding )
  59101. && ( divisor == rhs.divisor );
  59102. }
  59103. bool operator!=( VertexInputBindingDivisorDescriptionEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  59104. {
  59105. return !operator==( rhs );
  59106. }
  59107. #endif
  59108. public:
  59109. uint32_t binding = {};
  59110. uint32_t divisor = {};
  59111. };
  59112. static_assert( sizeof( VertexInputBindingDivisorDescriptionEXT ) == sizeof( VkVertexInputBindingDivisorDescriptionEXT ), "struct and wrapper have different size!" );
  59113. static_assert( std::is_standard_layout<VertexInputBindingDivisorDescriptionEXT>::value, "struct wrapper is not a standard layout!" );
  59114. struct PipelineVertexInputDivisorStateCreateInfoEXT
  59115. {
  59116. static const bool allowDuplicate = false;
  59117. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
  59118. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59119. VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT(uint32_t vertexBindingDivisorCount_ = {}, const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ = {}) VULKAN_HPP_NOEXCEPT
  59120. : vertexBindingDivisorCount( vertexBindingDivisorCount_ ), pVertexBindingDivisors( pVertexBindingDivisors_ )
  59121. {}
  59122. VULKAN_HPP_CONSTEXPR PipelineVertexInputDivisorStateCreateInfoEXT( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59123. PipelineVertexInputDivisorStateCreateInfoEXT( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  59124. : PipelineVertexInputDivisorStateCreateInfoEXT( *reinterpret_cast<PipelineVertexInputDivisorStateCreateInfoEXT const *>( &rhs ) )
  59125. {}
  59126. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59127. PipelineVertexInputDivisorStateCreateInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ )
  59128. : vertexBindingDivisorCount( static_cast<uint32_t>( vertexBindingDivisors_.size() ) ), pVertexBindingDivisors( vertexBindingDivisors_.data() )
  59129. {}
  59130. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59131. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59132. VULKAN_HPP_CONSTEXPR_14 PipelineVertexInputDivisorStateCreateInfoEXT & operator=( PipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59133. PipelineVertexInputDivisorStateCreateInfoEXT & operator=( VkPipelineVertexInputDivisorStateCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  59134. {
  59135. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT const *>( &rhs );
  59136. return *this;
  59137. }
  59138. PipelineVertexInputDivisorStateCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  59139. {
  59140. pNext = pNext_;
  59141. return *this;
  59142. }
  59143. PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisorCount( uint32_t vertexBindingDivisorCount_ ) VULKAN_HPP_NOEXCEPT
  59144. {
  59145. vertexBindingDivisorCount = vertexBindingDivisorCount_;
  59146. return *this;
  59147. }
  59148. PipelineVertexInputDivisorStateCreateInfoEXT & setPVertexBindingDivisors( const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT
  59149. {
  59150. pVertexBindingDivisors = pVertexBindingDivisors_;
  59151. return *this;
  59152. }
  59153. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59154. PipelineVertexInputDivisorStateCreateInfoEXT & setVertexBindingDivisors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT> const & vertexBindingDivisors_ ) VULKAN_HPP_NOEXCEPT
  59155. {
  59156. vertexBindingDivisorCount = static_cast<uint32_t>( vertexBindingDivisors_.size() );
  59157. pVertexBindingDivisors = vertexBindingDivisors_.data();
  59158. return *this;
  59159. }
  59160. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59161. operator VkPipelineVertexInputDivisorStateCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  59162. {
  59163. return *reinterpret_cast<const VkPipelineVertexInputDivisorStateCreateInfoEXT*>( this );
  59164. }
  59165. operator VkPipelineVertexInputDivisorStateCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  59166. {
  59167. return *reinterpret_cast<VkPipelineVertexInputDivisorStateCreateInfoEXT*>( this );
  59168. }
  59169. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  59170. auto operator<=>( PipelineVertexInputDivisorStateCreateInfoEXT const& ) const = default;
  59171. #else
  59172. bool operator==( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  59173. {
  59174. return ( sType == rhs.sType )
  59175. && ( pNext == rhs.pNext )
  59176. && ( vertexBindingDivisorCount == rhs.vertexBindingDivisorCount )
  59177. && ( pVertexBindingDivisors == rhs.pVertexBindingDivisors );
  59178. }
  59179. bool operator!=( PipelineVertexInputDivisorStateCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  59180. {
  59181. return !operator==( rhs );
  59182. }
  59183. #endif
  59184. public:
  59185. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT;
  59186. const void* pNext = {};
  59187. uint32_t vertexBindingDivisorCount = {};
  59188. const VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT* pVertexBindingDivisors = {};
  59189. };
  59190. static_assert( sizeof( PipelineVertexInputDivisorStateCreateInfoEXT ) == sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ), "struct and wrapper have different size!" );
  59191. static_assert( std::is_standard_layout<PipelineVertexInputDivisorStateCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  59192. template <>
  59193. struct CppType<StructureType, StructureType::ePipelineVertexInputDivisorStateCreateInfoEXT>
  59194. {
  59195. using Type = PipelineVertexInputDivisorStateCreateInfoEXT;
  59196. };
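// Editorial note: illustrative usage sketch, not part of the generated header. Assuming the
// default VULKAN_HPP_NAMESPACE (vk), VK_EXT_vertex_attribute_divisor, enhanced mode (the
// ArrayProxy setter available), and a hypothetical vk::PipelineVertexInputStateCreateInfo named
// vertexInputState:
//
//   std::array<vk::VertexInputBindingDivisorDescriptionEXT, 1> divisors =
//     { vk::VertexInputBindingDivisorDescriptionEXT( 1 /*binding*/, 4 /*divisor*/ ) };
//   auto divisorState = vk::PipelineVertexInputDivisorStateCreateInfoEXT{}
//                         .setVertexBindingDivisors( divisors );
//   vertexInputState.pNext = &divisorState;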
  59197. struct PipelineViewportCoarseSampleOrderStateCreateInfoNV
  59198. {
  59199. static const bool allowDuplicate = false;
  59200. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
  59201. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59202. VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV(VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault, uint32_t customSampleOrderCount_ = {}, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders_ = {}) VULKAN_HPP_NOEXCEPT
  59203. : sampleOrderType( sampleOrderType_ ), customSampleOrderCount( customSampleOrderCount_ ), pCustomSampleOrders( pCustomSampleOrders_ )
  59204. {}
  59205. VULKAN_HPP_CONSTEXPR PipelineViewportCoarseSampleOrderStateCreateInfoNV( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59206. PipelineViewportCoarseSampleOrderStateCreateInfoNV( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  59207. : PipelineViewportCoarseSampleOrderStateCreateInfoNV( *reinterpret_cast<PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>( &rhs ) )
  59208. {}
  59209. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59210. PipelineViewportCoarseSampleOrderStateCreateInfoNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders_ )
  59211. : sampleOrderType( sampleOrderType_ ), customSampleOrderCount( static_cast<uint32_t>( customSampleOrders_.size() ) ), pCustomSampleOrders( customSampleOrders_.data() )
  59212. {}
  59213. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59214. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59215. VULKAN_HPP_CONSTEXPR_14 PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59216. PipelineViewportCoarseSampleOrderStateCreateInfoNV & operator=( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  59217. {
  59218. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV const *>( &rhs );
  59219. return *this;
  59220. }
  59221. PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  59222. {
  59223. pNext = pNext_;
  59224. return *this;
  59225. }
  59226. PipelineViewportCoarseSampleOrderStateCreateInfoNV & setSampleOrderType( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType_ ) VULKAN_HPP_NOEXCEPT
  59227. {
  59228. sampleOrderType = sampleOrderType_;
  59229. return *this;
  59230. }
  59231. PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrderCount( uint32_t customSampleOrderCount_ ) VULKAN_HPP_NOEXCEPT
  59232. {
  59233. customSampleOrderCount = customSampleOrderCount_;
  59234. return *this;
  59235. }
  59236. PipelineViewportCoarseSampleOrderStateCreateInfoNV & setPCustomSampleOrders( const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders_ ) VULKAN_HPP_NOEXCEPT
  59237. {
  59238. pCustomSampleOrders = pCustomSampleOrders_;
  59239. return *this;
  59240. }
  59241. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59242. PipelineViewportCoarseSampleOrderStateCreateInfoNV & setCustomSampleOrders( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders_ ) VULKAN_HPP_NOEXCEPT
  59243. {
  59244. customSampleOrderCount = static_cast<uint32_t>( customSampleOrders_.size() );
  59245. pCustomSampleOrders = customSampleOrders_.data();
  59246. return *this;
  59247. }
  59248. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59249. operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
  59250. {
  59251. return *reinterpret_cast<const VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>( this );
  59252. }
  59253. operator VkPipelineViewportCoarseSampleOrderStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
  59254. {
  59255. return *reinterpret_cast<VkPipelineViewportCoarseSampleOrderStateCreateInfoNV*>( this );
  59256. }
  59257. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  59258. auto operator<=>( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& ) const = default;
  59259. #else
  59260. bool operator==( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  59261. {
  59262. return ( sType == rhs.sType )
  59263. && ( pNext == rhs.pNext )
  59264. && ( sampleOrderType == rhs.sampleOrderType )
  59265. && ( customSampleOrderCount == rhs.customSampleOrderCount )
  59266. && ( pCustomSampleOrders == rhs.pCustomSampleOrders );
  59267. }
  59268. bool operator!=( PipelineViewportCoarseSampleOrderStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  59269. {
  59270. return !operator==( rhs );
  59271. }
  59272. #endif
  59273. public:
  59274. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV;
  59275. const void* pNext = {};
  59276. VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType = VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV::eDefault;
  59277. uint32_t customSampleOrderCount = {};
  59278. const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders = {};
  59279. };
  59280. static_assert( sizeof( PipelineViewportCoarseSampleOrderStateCreateInfoNV ) == sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ), "struct and wrapper have different size!" );
  59281. static_assert( std::is_standard_layout<PipelineViewportCoarseSampleOrderStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  59282. template <>
  59283. struct CppType<StructureType, StructureType::ePipelineViewportCoarseSampleOrderStateCreateInfoNV>
  59284. {
  59285. using Type = PipelineViewportCoarseSampleOrderStateCreateInfoNV;
  59286. };
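// Editorial note: illustrative usage sketch, not part of the generated header. Assuming the
// default VULKAN_HPP_NAMESPACE (vk), VK_NV_shading_rate_image, and a hypothetical
// vk::PipelineViewportStateCreateInfo named viewportState; with a non-custom order type no
// custom sample orders need to be supplied:
//
//   auto sampleOrder = vk::PipelineViewportCoarseSampleOrderStateCreateInfoNV{}
//                        .setSampleOrderType( vk::CoarseSampleOrderTypeNV::ePixelMajor );
//   viewportState.pNext = &sampleOrder;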
  59287. struct PipelineViewportExclusiveScissorStateCreateInfoNV
  59288. {
  59289. static const bool allowDuplicate = false;
  59290. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
  59291. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59292. VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV(uint32_t exclusiveScissorCount_ = {}, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors_ = {}) VULKAN_HPP_NOEXCEPT
  59293. : exclusiveScissorCount( exclusiveScissorCount_ ), pExclusiveScissors( pExclusiveScissors_ )
  59294. {}
  59295. VULKAN_HPP_CONSTEXPR PipelineViewportExclusiveScissorStateCreateInfoNV( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59296. PipelineViewportExclusiveScissorStateCreateInfoNV( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  59297. : PipelineViewportExclusiveScissorStateCreateInfoNV( *reinterpret_cast<PipelineViewportExclusiveScissorStateCreateInfoNV const *>( &rhs ) )
  59298. {}
  59299. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59300. PipelineViewportExclusiveScissorStateCreateInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors_ )
  59301. : exclusiveScissorCount( static_cast<uint32_t>( exclusiveScissors_.size() ) ), pExclusiveScissors( exclusiveScissors_.data() )
  59302. {}
  59303. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59304. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59305. VULKAN_HPP_CONSTEXPR_14 PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( PipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59306. PipelineViewportExclusiveScissorStateCreateInfoNV & operator=( VkPipelineViewportExclusiveScissorStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  59307. {
  59308. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV const *>( &rhs );
  59309. return *this;
  59310. }
  59311. PipelineViewportExclusiveScissorStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  59312. {
  59313. pNext = pNext_;
  59314. return *this;
  59315. }
  59316. PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissorCount( uint32_t exclusiveScissorCount_ ) VULKAN_HPP_NOEXCEPT
  59317. {
  59318. exclusiveScissorCount = exclusiveScissorCount_;
  59319. return *this;
  59320. }
  59321. PipelineViewportExclusiveScissorStateCreateInfoNV & setPExclusiveScissors( const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
  59322. {
  59323. pExclusiveScissors = pExclusiveScissors_;
  59324. return *this;
  59325. }
  59326. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59327. PipelineViewportExclusiveScissorStateCreateInfoNV & setExclusiveScissors( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors_ ) VULKAN_HPP_NOEXCEPT
  59328. {
  59329. exclusiveScissorCount = static_cast<uint32_t>( exclusiveScissors_.size() );
  59330. pExclusiveScissors = exclusiveScissors_.data();
  59331. return *this;
  59332. }
  59333. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59334. operator VkPipelineViewportExclusiveScissorStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
  59335. {
  59336. return *reinterpret_cast<const VkPipelineViewportExclusiveScissorStateCreateInfoNV*>( this );
  59337. }
  59338. operator VkPipelineViewportExclusiveScissorStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
  59339. {
  59340. return *reinterpret_cast<VkPipelineViewportExclusiveScissorStateCreateInfoNV*>( this );
  59341. }
  59342. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  59343. auto operator<=>( PipelineViewportExclusiveScissorStateCreateInfoNV const& ) const = default;
  59344. #else
  59345. bool operator==( PipelineViewportExclusiveScissorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  59346. {
  59347. return ( sType == rhs.sType )
  59348. && ( pNext == rhs.pNext )
  59349. && ( exclusiveScissorCount == rhs.exclusiveScissorCount )
  59350. && ( pExclusiveScissors == rhs.pExclusiveScissors );
  59351. }
  59352. bool operator!=( PipelineViewportExclusiveScissorStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  59353. {
  59354. return !operator==( rhs );
  59355. }
  59356. #endif
  59357. public:
  59358. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV;
  59359. const void* pNext = {};
  59360. uint32_t exclusiveScissorCount = {};
  59361. const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors = {};
  59362. };
  59363. static_assert( sizeof( PipelineViewportExclusiveScissorStateCreateInfoNV ) == sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ), "struct and wrapper have different size!" );
  59364. static_assert( std::is_standard_layout<PipelineViewportExclusiveScissorStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  59365. template <>
  59366. struct CppType<StructureType, StructureType::ePipelineViewportExclusiveScissorStateCreateInfoNV>
  59367. {
  59368. using Type = PipelineViewportExclusiveScissorStateCreateInfoNV;
  59369. };
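// Editorial note: illustrative usage sketch, not part of the generated header. Assuming the
// default VULKAN_HPP_NAMESPACE (vk), VK_NV_scissor_exclusive, enhanced mode, and a hypothetical
// vk::PipelineViewportStateCreateInfo named viewportState:
//
//   std::array<vk::Rect2D, 1> scissors =
//     { vk::Rect2D( vk::Offset2D( 0, 0 ), vk::Extent2D( 256, 256 ) ) };
//   auto exclusiveScissor = vk::PipelineViewportExclusiveScissorStateCreateInfoNV{}
//                             .setExclusiveScissors( scissors );
//   viewportState.pNext = &exclusiveScissor;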
  59370. struct PipelineViewportShadingRateImageStateCreateInfoNV
  59371. {
  59372. static const bool allowDuplicate = false;
  59373. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
  59374. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59375. VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes_ = {}) VULKAN_HPP_NOEXCEPT
  59376. : shadingRateImageEnable( shadingRateImageEnable_ ), viewportCount( viewportCount_ ), pShadingRatePalettes( pShadingRatePalettes_ )
  59377. {}
  59378. VULKAN_HPP_CONSTEXPR PipelineViewportShadingRateImageStateCreateInfoNV( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59379. PipelineViewportShadingRateImageStateCreateInfoNV( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  59380. : PipelineViewportShadingRateImageStateCreateInfoNV( *reinterpret_cast<PipelineViewportShadingRateImageStateCreateInfoNV const *>( &rhs ) )
  59381. {}
  59382. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59383. PipelineViewportShadingRateImageStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes_ )
  59384. : shadingRateImageEnable( shadingRateImageEnable_ ), viewportCount( static_cast<uint32_t>( shadingRatePalettes_.size() ) ), pShadingRatePalettes( shadingRatePalettes_.data() )
  59385. {}
  59386. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59387. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59388. VULKAN_HPP_CONSTEXPR_14 PipelineViewportShadingRateImageStateCreateInfoNV & operator=( PipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59389. PipelineViewportShadingRateImageStateCreateInfoNV & operator=( VkPipelineViewportShadingRateImageStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  59390. {
  59391. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV const *>( &rhs );
  59392. return *this;
  59393. }
  59394. PipelineViewportShadingRateImageStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  59395. {
  59396. pNext = pNext_;
  59397. return *this;
  59398. }
  59399. PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRateImageEnable( VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable_ ) VULKAN_HPP_NOEXCEPT
  59400. {
  59401. shadingRateImageEnable = shadingRateImageEnable_;
  59402. return *this;
  59403. }
  59404. PipelineViewportShadingRateImageStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
  59405. {
  59406. viewportCount = viewportCount_;
  59407. return *this;
  59408. }
  59409. PipelineViewportShadingRateImageStateCreateInfoNV & setPShadingRatePalettes( const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
  59410. {
  59411. pShadingRatePalettes = pShadingRatePalettes_;
  59412. return *this;
  59413. }
  59414. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59415. PipelineViewportShadingRateImageStateCreateInfoNV & setShadingRatePalettes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes_ ) VULKAN_HPP_NOEXCEPT
  59416. {
  59417. viewportCount = static_cast<uint32_t>( shadingRatePalettes_.size() );
  59418. pShadingRatePalettes = shadingRatePalettes_.data();
  59419. return *this;
  59420. }
  59421. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59422. operator VkPipelineViewportShadingRateImageStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
  59423. {
  59424. return *reinterpret_cast<const VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
  59425. }
  59426. operator VkPipelineViewportShadingRateImageStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
  59427. {
  59428. return *reinterpret_cast<VkPipelineViewportShadingRateImageStateCreateInfoNV*>( this );
  59429. }
  59430. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  59431. auto operator<=>( PipelineViewportShadingRateImageStateCreateInfoNV const& ) const = default;
  59432. #else
  59433. bool operator==( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  59434. {
  59435. return ( sType == rhs.sType )
  59436. && ( pNext == rhs.pNext )
  59437. && ( shadingRateImageEnable == rhs.shadingRateImageEnable )
  59438. && ( viewportCount == rhs.viewportCount )
  59439. && ( pShadingRatePalettes == rhs.pShadingRatePalettes );
  59440. }
  59441. bool operator!=( PipelineViewportShadingRateImageStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  59442. {
  59443. return !operator==( rhs );
  59444. }
  59445. #endif
  59446. public:
  59447. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV;
  59448. const void* pNext = {};
  59449. VULKAN_HPP_NAMESPACE::Bool32 shadingRateImageEnable = {};
  59450. uint32_t viewportCount = {};
  59451. const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes = {};
  59452. };
  59453. static_assert( sizeof( PipelineViewportShadingRateImageStateCreateInfoNV ) == sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ), "struct and wrapper have different size!" );
  59454. static_assert( std::is_standard_layout<PipelineViewportShadingRateImageStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  59455. template <>
  59456. struct CppType<StructureType, StructureType::ePipelineViewportShadingRateImageStateCreateInfoNV>
  59457. {
  59458. using Type = PipelineViewportShadingRateImageStateCreateInfoNV;
  59459. };
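// Editorial note: illustrative usage sketch, not part of the generated header. Assuming the
// default VULKAN_HPP_NAMESPACE (vk), VK_NV_shading_rate_image, enhanced mode, a previously
// filled container of vk::ShadingRatePaletteNV named palettes (one per viewport, hypothetical),
// and a hypothetical vk::PipelineViewportStateCreateInfo named viewportState:
//
//   auto shadingRateState = vk::PipelineViewportShadingRateImageStateCreateInfoNV{}
//                             .setShadingRateImageEnable( VK_TRUE )
//                             .setShadingRatePalettes( palettes );
//   viewportState.pNext = &shadingRateState;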
  59460. struct ViewportSwizzleNV
  59461. {
  59462. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59463. VULKAN_HPP_CONSTEXPR ViewportSwizzleNV(VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX, VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX) VULKAN_HPP_NOEXCEPT
  59464. : x( x_ ), y( y_ ), z( z_ ), w( w_ )
  59465. {}
  59466. VULKAN_HPP_CONSTEXPR ViewportSwizzleNV( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59467. ViewportSwizzleNV( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
  59468. : ViewportSwizzleNV( *reinterpret_cast<ViewportSwizzleNV const *>( &rhs ) )
  59469. {}
  59470. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59471. VULKAN_HPP_CONSTEXPR_14 ViewportSwizzleNV & operator=( ViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59472. ViewportSwizzleNV & operator=( VkViewportSwizzleNV const & rhs ) VULKAN_HPP_NOEXCEPT
  59473. {
  59474. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV const *>( &rhs );
  59475. return *this;
  59476. }
  59477. ViewportSwizzleNV & setX( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x_ ) VULKAN_HPP_NOEXCEPT
  59478. {
  59479. x = x_;
  59480. return *this;
  59481. }
  59482. ViewportSwizzleNV & setY( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y_ ) VULKAN_HPP_NOEXCEPT
  59483. {
  59484. y = y_;
  59485. return *this;
  59486. }
  59487. ViewportSwizzleNV & setZ( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z_ ) VULKAN_HPP_NOEXCEPT
  59488. {
  59489. z = z_;
  59490. return *this;
  59491. }
  59492. ViewportSwizzleNV & setW( VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w_ ) VULKAN_HPP_NOEXCEPT
  59493. {
  59494. w = w_;
  59495. return *this;
  59496. }
  59497. operator VkViewportSwizzleNV const&() const VULKAN_HPP_NOEXCEPT
  59498. {
  59499. return *reinterpret_cast<const VkViewportSwizzleNV*>( this );
  59500. }
  59501. operator VkViewportSwizzleNV &() VULKAN_HPP_NOEXCEPT
  59502. {
  59503. return *reinterpret_cast<VkViewportSwizzleNV*>( this );
  59504. }
  59505. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  59506. auto operator<=>( ViewportSwizzleNV const& ) const = default;
  59507. #else
  59508. bool operator==( ViewportSwizzleNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  59509. {
  59510. return ( x == rhs.x )
  59511. && ( y == rhs.y )
  59512. && ( z == rhs.z )
  59513. && ( w == rhs.w );
  59514. }
  59515. bool operator!=( ViewportSwizzleNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  59516. {
  59517. return !operator==( rhs );
  59518. }
  59519. #endif
  59520. public:
  59521. VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV x = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
  59522. VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV y = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
  59523. VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV z = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
  59524. VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV w = VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV::ePositiveX;
  59525. };
  59526. static_assert( sizeof( ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" );
  59527. static_assert( std::is_standard_layout<ViewportSwizzleNV>::value, "struct wrapper is not a standard layout!" );
  59528. struct PipelineViewportSwizzleStateCreateInfoNV
  59529. {
  59530. static const bool allowDuplicate = false;
  59531. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
  59532. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59533. VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV(VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles_ = {}) VULKAN_HPP_NOEXCEPT
  59534. : flags( flags_ ), viewportCount( viewportCount_ ), pViewportSwizzles( pViewportSwizzles_ )
  59535. {}
  59536. VULKAN_HPP_CONSTEXPR PipelineViewportSwizzleStateCreateInfoNV( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59537. PipelineViewportSwizzleStateCreateInfoNV( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  59538. : PipelineViewportSwizzleStateCreateInfoNV( *reinterpret_cast<PipelineViewportSwizzleStateCreateInfoNV const *>( &rhs ) )
  59539. {}
  59540. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59541. PipelineViewportSwizzleStateCreateInfoNV( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles_ )
  59542. : flags( flags_ ), viewportCount( static_cast<uint32_t>( viewportSwizzles_.size() ) ), pViewportSwizzles( viewportSwizzles_.data() )
  59543. {}
  59544. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59545. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59546. VULKAN_HPP_CONSTEXPR_14 PipelineViewportSwizzleStateCreateInfoNV & operator=( PipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59547. PipelineViewportSwizzleStateCreateInfoNV & operator=( VkPipelineViewportSwizzleStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  59548. {
  59549. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV const *>( &rhs );
  59550. return *this;
  59551. }
  59552. PipelineViewportSwizzleStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  59553. {
  59554. pNext = pNext_;
  59555. return *this;
  59556. }
  59557. PipelineViewportSwizzleStateCreateInfoNV & setFlags( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags_ ) VULKAN_HPP_NOEXCEPT
  59558. {
  59559. flags = flags_;
  59560. return *this;
  59561. }
  59562. PipelineViewportSwizzleStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
  59563. {
  59564. viewportCount = viewportCount_;
  59565. return *this;
  59566. }
  59567. PipelineViewportSwizzleStateCreateInfoNV & setPViewportSwizzles( const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
  59568. {
  59569. pViewportSwizzles = pViewportSwizzles_;
  59570. return *this;
  59571. }
  59572. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59573. PipelineViewportSwizzleStateCreateInfoNV & setViewportSwizzles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV> const & viewportSwizzles_ ) VULKAN_HPP_NOEXCEPT
  59574. {
  59575. viewportCount = static_cast<uint32_t>( viewportSwizzles_.size() );
  59576. pViewportSwizzles = viewportSwizzles_.data();
  59577. return *this;
  59578. }
  59579. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59580. operator VkPipelineViewportSwizzleStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
  59581. {
  59582. return *reinterpret_cast<const VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
  59583. }
  59584. operator VkPipelineViewportSwizzleStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
  59585. {
  59586. return *reinterpret_cast<VkPipelineViewportSwizzleStateCreateInfoNV*>( this );
  59587. }
  59588. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  59589. auto operator<=>( PipelineViewportSwizzleStateCreateInfoNV const& ) const = default;
  59590. #else
  59591. bool operator==( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  59592. {
  59593. return ( sType == rhs.sType )
  59594. && ( pNext == rhs.pNext )
  59595. && ( flags == rhs.flags )
  59596. && ( viewportCount == rhs.viewportCount )
  59597. && ( pViewportSwizzles == rhs.pViewportSwizzles );
  59598. }
  59599. bool operator!=( PipelineViewportSwizzleStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  59600. {
  59601. return !operator==( rhs );
  59602. }
  59603. #endif
  59604. public:
  59605. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportSwizzleStateCreateInfoNV;
  59606. const void* pNext = {};
  59607. VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV flags = {};
  59608. uint32_t viewportCount = {};
  59609. const VULKAN_HPP_NAMESPACE::ViewportSwizzleNV* pViewportSwizzles = {};
  59610. };
  59611. static_assert( sizeof( PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ), "struct and wrapper have different size!" );
  59612. static_assert( std::is_standard_layout<PipelineViewportSwizzleStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  59613. template <>
  59614. struct CppType<StructureType, StructureType::ePipelineViewportSwizzleStateCreateInfoNV>
  59615. {
  59616. using Type = PipelineViewportSwizzleStateCreateInfoNV;
  59617. };
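// Illustrative sketch, not part of the generated header: filling this struct with the
// enhanced-mode ArrayProxy constructor and chaining it into a hypothetical pipeline
// viewport state for VK_NV_viewport_swizzle. Assumes the default `vk` namespace alias.
//
//   std::array<vk::ViewportSwizzleNV, 1> swizzles = { vk::ViewportSwizzleNV(
//     vk::ViewportCoordinateSwizzleNV::ePositiveX, vk::ViewportCoordinateSwizzleNV::ePositiveY,
//     vk::ViewportCoordinateSwizzleNV::ePositiveZ, vk::ViewportCoordinateSwizzleNV::ePositiveW ) };
//   vk::PipelineViewportSwizzleStateCreateInfoNV swizzleState( {}, swizzles );
//   vk::PipelineViewportStateCreateInfo viewportState;   // hypothetical pipeline viewport state
//   viewportState.pNext = &swizzleState;                 // viewportCount must match the viewport state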
  59618. struct PipelineViewportWScalingStateCreateInfoNV
  59619. {
  59620. static const bool allowDuplicate = false;
  59621. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
  59622. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59623. VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV(VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ = {}, uint32_t viewportCount_ = {}, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings_ = {}) VULKAN_HPP_NOEXCEPT
  59624. : viewportWScalingEnable( viewportWScalingEnable_ ), viewportCount( viewportCount_ ), pViewportWScalings( pViewportWScalings_ )
  59625. {}
  59626. VULKAN_HPP_CONSTEXPR PipelineViewportWScalingStateCreateInfoNV( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59627. PipelineViewportWScalingStateCreateInfoNV( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  59628. : PipelineViewportWScalingStateCreateInfoNV( *reinterpret_cast<PipelineViewportWScalingStateCreateInfoNV const *>( &rhs ) )
  59629. {}
  59630. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59631. PipelineViewportWScalingStateCreateInfoNV( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings_ )
  59632. : viewportWScalingEnable( viewportWScalingEnable_ ), viewportCount( static_cast<uint32_t>( viewportWScalings_.size() ) ), pViewportWScalings( viewportWScalings_.data() )
  59633. {}
  59634. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59635. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59636. VULKAN_HPP_CONSTEXPR_14 PipelineViewportWScalingStateCreateInfoNV & operator=( PipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59637. PipelineViewportWScalingStateCreateInfoNV & operator=( VkPipelineViewportWScalingStateCreateInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  59638. {
  59639. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV const *>( &rhs );
  59640. return *this;
  59641. }
  59642. PipelineViewportWScalingStateCreateInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  59643. {
  59644. pNext = pNext_;
  59645. return *this;
  59646. }
  59647. PipelineViewportWScalingStateCreateInfoNV & setViewportWScalingEnable( VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable_ ) VULKAN_HPP_NOEXCEPT
  59648. {
  59649. viewportWScalingEnable = viewportWScalingEnable_;
  59650. return *this;
  59651. }
  59652. PipelineViewportWScalingStateCreateInfoNV & setViewportCount( uint32_t viewportCount_ ) VULKAN_HPP_NOEXCEPT
  59653. {
  59654. viewportCount = viewportCount_;
  59655. return *this;
  59656. }
  59657. PipelineViewportWScalingStateCreateInfoNV & setPViewportWScalings( const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings_ ) VULKAN_HPP_NOEXCEPT
  59658. {
  59659. pViewportWScalings = pViewportWScalings_;
  59660. return *this;
  59661. }
  59662. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59663. PipelineViewportWScalingStateCreateInfoNV & setViewportWScalings( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings_ ) VULKAN_HPP_NOEXCEPT
  59664. {
  59665. viewportCount = static_cast<uint32_t>( viewportWScalings_.size() );
  59666. pViewportWScalings = viewportWScalings_.data();
  59667. return *this;
  59668. }
  59669. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59670. operator VkPipelineViewportWScalingStateCreateInfoNV const&() const VULKAN_HPP_NOEXCEPT
  59671. {
  59672. return *reinterpret_cast<const VkPipelineViewportWScalingStateCreateInfoNV*>( this );
  59673. }
  59674. operator VkPipelineViewportWScalingStateCreateInfoNV &() VULKAN_HPP_NOEXCEPT
  59675. {
  59676. return *reinterpret_cast<VkPipelineViewportWScalingStateCreateInfoNV*>( this );
  59677. }
  59678. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  59679. auto operator<=>( PipelineViewportWScalingStateCreateInfoNV const& ) const = default;
  59680. #else
  59681. bool operator==( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  59682. {
  59683. return ( sType == rhs.sType )
  59684. && ( pNext == rhs.pNext )
  59685. && ( viewportWScalingEnable == rhs.viewportWScalingEnable )
  59686. && ( viewportCount == rhs.viewportCount )
  59687. && ( pViewportWScalings == rhs.pViewportWScalings );
  59688. }
  59689. bool operator!=( PipelineViewportWScalingStateCreateInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  59690. {
  59691. return !operator==( rhs );
  59692. }
  59693. #endif
  59694. public:
  59695. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePipelineViewportWScalingStateCreateInfoNV;
  59696. const void* pNext = {};
  59697. VULKAN_HPP_NAMESPACE::Bool32 viewportWScalingEnable = {};
  59698. uint32_t viewportCount = {};
  59699. const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings = {};
  59700. };
  59701. static_assert( sizeof( PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ), "struct and wrapper have different size!" );
  59702. static_assert( std::is_standard_layout<PipelineViewportWScalingStateCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
  59703. template <>
  59704. struct CppType<StructureType, StructureType::ePipelineViewportWScalingStateCreateInfoNV>
  59705. {
  59706. using Type = PipelineViewportWScalingStateCreateInfoNV;
  59707. };
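// Illustrative sketch, not part of the generated header: enabling viewport W-scaling
// (VK_NV_clip_space_w_scaling) through the enhanced-mode constructor and chaining the
// struct into a hypothetical `viewportState`. The coefficients are placeholder values.
//
//   std::array<vk::ViewportWScalingNV, 1> wScalings = { vk::ViewportWScalingNV( 0.5f, 0.5f ) };
//   vk::PipelineViewportWScalingStateCreateInfoNV wScalingState( VK_TRUE, wScalings );
//   viewportState.pNext = &wScalingState;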
  59708. #ifdef VK_USE_PLATFORM_GGP
  59709. struct PresentFrameTokenGGP
  59710. {
  59711. static const bool allowDuplicate = false;
  59712. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentFrameTokenGGP;
  59713. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59714. VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP(GgpFrameToken frameToken_ = {}) VULKAN_HPP_NOEXCEPT
  59715. : frameToken( frameToken_ )
  59716. {}
  59717. VULKAN_HPP_CONSTEXPR PresentFrameTokenGGP( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59718. PresentFrameTokenGGP( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
  59719. : PresentFrameTokenGGP( *reinterpret_cast<PresentFrameTokenGGP const *>( &rhs ) )
  59720. {}
  59721. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59722. VULKAN_HPP_CONSTEXPR_14 PresentFrameTokenGGP & operator=( PresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59723. PresentFrameTokenGGP & operator=( VkPresentFrameTokenGGP const & rhs ) VULKAN_HPP_NOEXCEPT
  59724. {
  59725. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP const *>( &rhs );
  59726. return *this;
  59727. }
  59728. PresentFrameTokenGGP & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  59729. {
  59730. pNext = pNext_;
  59731. return *this;
  59732. }
  59733. PresentFrameTokenGGP & setFrameToken( GgpFrameToken frameToken_ ) VULKAN_HPP_NOEXCEPT
  59734. {
  59735. frameToken = frameToken_;
  59736. return *this;
  59737. }
  59738. operator VkPresentFrameTokenGGP const&() const VULKAN_HPP_NOEXCEPT
  59739. {
  59740. return *reinterpret_cast<const VkPresentFrameTokenGGP*>( this );
  59741. }
  59742. operator VkPresentFrameTokenGGP &() VULKAN_HPP_NOEXCEPT
  59743. {
  59744. return *reinterpret_cast<VkPresentFrameTokenGGP*>( this );
  59745. }
  59746. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  59747. auto operator<=>( PresentFrameTokenGGP const& ) const = default;
  59748. #else
  59749. bool operator==( PresentFrameTokenGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
  59750. {
  59751. return ( sType == rhs.sType )
  59752. && ( pNext == rhs.pNext )
  59753. && ( memcmp( &frameToken, &rhs.frameToken, sizeof( GgpFrameToken ) ) == 0 );
  59754. }
  59755. bool operator!=( PresentFrameTokenGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
  59756. {
  59757. return !operator==( rhs );
  59758. }
  59759. #endif
  59760. public:
  59761. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentFrameTokenGGP;
  59762. const void* pNext = {};
  59763. GgpFrameToken frameToken = {};
  59764. };
  59765. static_assert( sizeof( PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), "struct and wrapper have different size!" );
  59766. static_assert( std::is_standard_layout<PresentFrameTokenGGP>::value, "struct wrapper is not a standard layout!" );
  59767. template <>
  59768. struct CppType<StructureType, StructureType::ePresentFrameTokenGGP>
  59769. {
  59770. using Type = PresentFrameTokenGGP;
  59771. };
  59772. #endif /*VK_USE_PLATFORM_GGP*/
  59773. struct RectLayerKHR
  59774. {
  59775. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59776. VULKAN_HPP_CONSTEXPR RectLayerKHR(VULKAN_HPP_NAMESPACE::Offset2D offset_ = {}, VULKAN_HPP_NAMESPACE::Extent2D extent_ = {}, uint32_t layer_ = {}) VULKAN_HPP_NOEXCEPT
  59777. : offset( offset_ ), extent( extent_ ), layer( layer_ )
  59778. {}
  59779. VULKAN_HPP_CONSTEXPR RectLayerKHR( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59780. RectLayerKHR( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  59781. : RectLayerKHR( *reinterpret_cast<RectLayerKHR const *>( &rhs ) )
  59782. {}
  59783. explicit RectLayerKHR( Rect2D const& rect2D, uint32_t layer_ = {} )
  59784. : offset( rect2D.offset )
  59785. , extent( rect2D.extent )
  59786. , layer( layer_ )
  59787. {}
  59788. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59789. VULKAN_HPP_CONSTEXPR_14 RectLayerKHR & operator=( RectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59790. RectLayerKHR & operator=( VkRectLayerKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  59791. {
  59792. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RectLayerKHR const *>( &rhs );
  59793. return *this;
  59794. }
  59795. RectLayerKHR & setOffset( VULKAN_HPP_NAMESPACE::Offset2D const & offset_ ) VULKAN_HPP_NOEXCEPT
  59796. {
  59797. offset = offset_;
  59798. return *this;
  59799. }
  59800. RectLayerKHR & setExtent( VULKAN_HPP_NAMESPACE::Extent2D const & extent_ ) VULKAN_HPP_NOEXCEPT
  59801. {
  59802. extent = extent_;
  59803. return *this;
  59804. }
  59805. RectLayerKHR & setLayer( uint32_t layer_ ) VULKAN_HPP_NOEXCEPT
  59806. {
  59807. layer = layer_;
  59808. return *this;
  59809. }
  59810. operator VkRectLayerKHR const&() const VULKAN_HPP_NOEXCEPT
  59811. {
  59812. return *reinterpret_cast<const VkRectLayerKHR*>( this );
  59813. }
  59814. operator VkRectLayerKHR &() VULKAN_HPP_NOEXCEPT
  59815. {
  59816. return *reinterpret_cast<VkRectLayerKHR*>( this );
  59817. }
  59818. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  59819. auto operator<=>( RectLayerKHR const& ) const = default;
  59820. #else
  59821. bool operator==( RectLayerKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  59822. {
  59823. return ( offset == rhs.offset )
  59824. && ( extent == rhs.extent )
  59825. && ( layer == rhs.layer );
  59826. }
  59827. bool operator!=( RectLayerKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  59828. {
  59829. return !operator==( rhs );
  59830. }
  59831. #endif
  59832. public:
  59833. VULKAN_HPP_NAMESPACE::Offset2D offset = {};
  59834. VULKAN_HPP_NAMESPACE::Extent2D extent = {};
  59835. uint32_t layer = {};
  59836. };
  59837. static_assert( sizeof( RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" );
  59838. static_assert( std::is_standard_layout<RectLayerKHR>::value, "struct wrapper is not a standard layout!" );
  59839. struct PresentRegionKHR
  59840. {
  59841. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59842. VULKAN_HPP_CONSTEXPR PresentRegionKHR(uint32_t rectangleCount_ = {}, const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles_ = {}) VULKAN_HPP_NOEXCEPT
  59843. : rectangleCount( rectangleCount_ ), pRectangles( pRectangles_ )
  59844. {}
  59845. VULKAN_HPP_CONSTEXPR PresentRegionKHR( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59846. PresentRegionKHR( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  59847. : PresentRegionKHR( *reinterpret_cast<PresentRegionKHR const *>( &rhs ) )
  59848. {}
  59849. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59850. PresentRegionKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ )
  59851. : rectangleCount( static_cast<uint32_t>( rectangles_.size() ) ), pRectangles( rectangles_.data() )
  59852. {}
  59853. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59854. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59855. VULKAN_HPP_CONSTEXPR_14 PresentRegionKHR & operator=( PresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59856. PresentRegionKHR & operator=( VkPresentRegionKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  59857. {
  59858. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionKHR const *>( &rhs );
  59859. return *this;
  59860. }
  59861. PresentRegionKHR & setRectangleCount( uint32_t rectangleCount_ ) VULKAN_HPP_NOEXCEPT
  59862. {
  59863. rectangleCount = rectangleCount_;
  59864. return *this;
  59865. }
  59866. PresentRegionKHR & setPRectangles( const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles_ ) VULKAN_HPP_NOEXCEPT
  59867. {
  59868. pRectangles = pRectangles_;
  59869. return *this;
  59870. }
  59871. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59872. PresentRegionKHR & setRectangles( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::RectLayerKHR> const & rectangles_ ) VULKAN_HPP_NOEXCEPT
  59873. {
  59874. rectangleCount = static_cast<uint32_t>( rectangles_.size() );
  59875. pRectangles = rectangles_.data();
  59876. return *this;
  59877. }
  59878. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59879. operator VkPresentRegionKHR const&() const VULKAN_HPP_NOEXCEPT
  59880. {
  59881. return *reinterpret_cast<const VkPresentRegionKHR*>( this );
  59882. }
  59883. operator VkPresentRegionKHR &() VULKAN_HPP_NOEXCEPT
  59884. {
  59885. return *reinterpret_cast<VkPresentRegionKHR*>( this );
  59886. }
  59887. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  59888. auto operator<=>( PresentRegionKHR const& ) const = default;
  59889. #else
  59890. bool operator==( PresentRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  59891. {
  59892. return ( rectangleCount == rhs.rectangleCount )
  59893. && ( pRectangles == rhs.pRectangles );
  59894. }
  59895. bool operator!=( PresentRegionKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  59896. {
  59897. return !operator==( rhs );
  59898. }
  59899. #endif
  59900. public:
  59901. uint32_t rectangleCount = {};
  59902. const VULKAN_HPP_NAMESPACE::RectLayerKHR* pRectangles = {};
  59903. };
  59904. static_assert( sizeof( PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" );
  59905. static_assert( std::is_standard_layout<PresentRegionKHR>::value, "struct wrapper is not a standard layout!" );
  59906. struct PresentRegionsKHR
  59907. {
  59908. static const bool allowDuplicate = false;
  59909. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentRegionsKHR;
  59910. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59911. VULKAN_HPP_CONSTEXPR PresentRegionsKHR(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions_ = {}) VULKAN_HPP_NOEXCEPT
  59912. : swapchainCount( swapchainCount_ ), pRegions( pRegions_ )
  59913. {}
  59914. VULKAN_HPP_CONSTEXPR PresentRegionsKHR( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59915. PresentRegionsKHR( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  59916. : PresentRegionsKHR( *reinterpret_cast<PresentRegionsKHR const *>( &rhs ) )
  59917. {}
  59918. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59919. PresentRegionsKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_ )
  59920. : swapchainCount( static_cast<uint32_t>( regions_.size() ) ), pRegions( regions_.data() )
  59921. {}
  59922. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59923. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59924. VULKAN_HPP_CONSTEXPR_14 PresentRegionsKHR & operator=( PresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59925. PresentRegionsKHR & operator=( VkPresentRegionsKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  59926. {
  59927. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentRegionsKHR const *>( &rhs );
  59928. return *this;
  59929. }
  59930. PresentRegionsKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  59931. {
  59932. pNext = pNext_;
  59933. return *this;
  59934. }
  59935. PresentRegionsKHR & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
  59936. {
  59937. swapchainCount = swapchainCount_;
  59938. return *this;
  59939. }
  59940. PresentRegionsKHR & setPRegions( const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions_ ) VULKAN_HPP_NOEXCEPT
  59941. {
  59942. pRegions = pRegions_;
  59943. return *this;
  59944. }
  59945. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59946. PresentRegionsKHR & setRegions( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentRegionKHR> const & regions_ ) VULKAN_HPP_NOEXCEPT
  59947. {
  59948. swapchainCount = static_cast<uint32_t>( regions_.size() );
  59949. pRegions = regions_.data();
  59950. return *this;
  59951. }
  59952. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  59953. operator VkPresentRegionsKHR const&() const VULKAN_HPP_NOEXCEPT
  59954. {
  59955. return *reinterpret_cast<const VkPresentRegionsKHR*>( this );
  59956. }
  59957. operator VkPresentRegionsKHR &() VULKAN_HPP_NOEXCEPT
  59958. {
  59959. return *reinterpret_cast<VkPresentRegionsKHR*>( this );
  59960. }
  59961. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  59962. auto operator<=>( PresentRegionsKHR const& ) const = default;
  59963. #else
  59964. bool operator==( PresentRegionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  59965. {
  59966. return ( sType == rhs.sType )
  59967. && ( pNext == rhs.pNext )
  59968. && ( swapchainCount == rhs.swapchainCount )
  59969. && ( pRegions == rhs.pRegions );
  59970. }
  59971. bool operator!=( PresentRegionsKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  59972. {
  59973. return !operator==( rhs );
  59974. }
  59975. #endif
  59976. public:
  59977. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentRegionsKHR;
  59978. const void* pNext = {};
  59979. uint32_t swapchainCount = {};
  59980. const VULKAN_HPP_NAMESPACE::PresentRegionKHR* pRegions = {};
  59981. };
  59982. static_assert( sizeof( PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" );
  59983. static_assert( std::is_standard_layout<PresentRegionsKHR>::value, "struct wrapper is not a standard layout!" );
  59984. template <>
  59985. struct CppType<StructureType, StructureType::ePresentRegionsKHR>
  59986. {
  59987. using Type = PresentRegionsKHR;
  59988. };
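// Illustrative sketch, not part of the generated header: describing one dirty rectangle
// for VK_KHR_incremental_present and chaining it into a hypothetical `presentInfo`
// (vk::PresentInfoKHR). The rectangle extent is a placeholder; swapchainCount must equal
// the number of swapchains being presented.
//
//   vk::RectLayerKHR dirtyRect( vk::Offset2D( 0, 0 ), vk::Extent2D( 256, 256 ), 0 );
//   vk::PresentRegionKHR region( 1, &dirtyRect );
//   vk::PresentRegionsKHR regions( 1, &region );
//   presentInfo.pNext = &regions;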
  59989. struct PresentTimeGOOGLE
  59990. {
  59991. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  59992. VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE(uint32_t presentID_ = {}, uint64_t desiredPresentTime_ = {}) VULKAN_HPP_NOEXCEPT
  59993. : presentID( presentID_ ), desiredPresentTime( desiredPresentTime_ )
  59994. {}
  59995. VULKAN_HPP_CONSTEXPR PresentTimeGOOGLE( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  59996. PresentTimeGOOGLE( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
  59997. : PresentTimeGOOGLE( *reinterpret_cast<PresentTimeGOOGLE const *>( &rhs ) )
  59998. {}
  59999. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  60000. VULKAN_HPP_CONSTEXPR_14 PresentTimeGOOGLE & operator=( PresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  60001. PresentTimeGOOGLE & operator=( VkPresentTimeGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
  60002. {
  60003. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE const *>( &rhs );
  60004. return *this;
  60005. }
  60006. PresentTimeGOOGLE & setPresentID( uint32_t presentID_ ) VULKAN_HPP_NOEXCEPT
  60007. {
  60008. presentID = presentID_;
  60009. return *this;
  60010. }
  60011. PresentTimeGOOGLE & setDesiredPresentTime( uint64_t desiredPresentTime_ ) VULKAN_HPP_NOEXCEPT
  60012. {
  60013. desiredPresentTime = desiredPresentTime_;
  60014. return *this;
  60015. }
  60016. operator VkPresentTimeGOOGLE const&() const VULKAN_HPP_NOEXCEPT
  60017. {
  60018. return *reinterpret_cast<const VkPresentTimeGOOGLE*>( this );
  60019. }
  60020. operator VkPresentTimeGOOGLE &() VULKAN_HPP_NOEXCEPT
  60021. {
  60022. return *reinterpret_cast<VkPresentTimeGOOGLE*>( this );
  60023. }
  60024. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  60025. auto operator<=>( PresentTimeGOOGLE const& ) const = default;
  60026. #else
  60027. bool operator==( PresentTimeGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
  60028. {
  60029. return ( presentID == rhs.presentID )
  60030. && ( desiredPresentTime == rhs.desiredPresentTime );
  60031. }
  60032. bool operator!=( PresentTimeGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
  60033. {
  60034. return !operator==( rhs );
  60035. }
  60036. #endif
  60037. public:
  60038. uint32_t presentID = {};
  60039. uint64_t desiredPresentTime = {};
  60040. };
  60041. static_assert( sizeof( PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" );
  60042. static_assert( std::is_standard_layout<PresentTimeGOOGLE>::value, "struct wrapper is not a standard layout!" );
  60043. struct PresentTimesInfoGOOGLE
  60044. {
  60045. static const bool allowDuplicate = false;
  60046. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::ePresentTimesInfoGOOGLE;
  60047. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  60048. VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE(uint32_t swapchainCount_ = {}, const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes_ = {}) VULKAN_HPP_NOEXCEPT
  60049. : swapchainCount( swapchainCount_ ), pTimes( pTimes_ )
  60050. {}
  60051. VULKAN_HPP_CONSTEXPR PresentTimesInfoGOOGLE( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  60052. PresentTimesInfoGOOGLE( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
  60053. : PresentTimesInfoGOOGLE( *reinterpret_cast<PresentTimesInfoGOOGLE const *>( &rhs ) )
  60054. {}
  60055. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  60056. PresentTimesInfoGOOGLE( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_ )
  60057. : swapchainCount( static_cast<uint32_t>( times_.size() ) ), pTimes( times_.data() )
  60058. {}
  60059. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  60060. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  60061. VULKAN_HPP_CONSTEXPR_14 PresentTimesInfoGOOGLE & operator=( PresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  60062. PresentTimesInfoGOOGLE & operator=( VkPresentTimesInfoGOOGLE const & rhs ) VULKAN_HPP_NOEXCEPT
  60063. {
  60064. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE const *>( &rhs );
  60065. return *this;
  60066. }
  60067. PresentTimesInfoGOOGLE & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  60068. {
  60069. pNext = pNext_;
  60070. return *this;
  60071. }
  60072. PresentTimesInfoGOOGLE & setSwapchainCount( uint32_t swapchainCount_ ) VULKAN_HPP_NOEXCEPT
  60073. {
  60074. swapchainCount = swapchainCount_;
  60075. return *this;
  60076. }
  60077. PresentTimesInfoGOOGLE & setPTimes( const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes_ ) VULKAN_HPP_NOEXCEPT
  60078. {
  60079. pTimes = pTimes_;
  60080. return *this;
  60081. }
  60082. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  60083. PresentTimesInfoGOOGLE & setTimes( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE> const & times_ ) VULKAN_HPP_NOEXCEPT
  60084. {
  60085. swapchainCount = static_cast<uint32_t>( times_.size() );
  60086. pTimes = times_.data();
  60087. return *this;
  60088. }
  60089. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  60090. operator VkPresentTimesInfoGOOGLE const&() const VULKAN_HPP_NOEXCEPT
  60091. {
  60092. return *reinterpret_cast<const VkPresentTimesInfoGOOGLE*>( this );
  60093. }
  60094. operator VkPresentTimesInfoGOOGLE &() VULKAN_HPP_NOEXCEPT
  60095. {
  60096. return *reinterpret_cast<VkPresentTimesInfoGOOGLE*>( this );
  60097. }
  60098. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  60099. auto operator<=>( PresentTimesInfoGOOGLE const& ) const = default;
  60100. #else
  60101. bool operator==( PresentTimesInfoGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
  60102. {
  60103. return ( sType == rhs.sType )
  60104. && ( pNext == rhs.pNext )
  60105. && ( swapchainCount == rhs.swapchainCount )
  60106. && ( pTimes == rhs.pTimes );
  60107. }
  60108. bool operator!=( PresentTimesInfoGOOGLE const& rhs ) const VULKAN_HPP_NOEXCEPT
  60109. {
  60110. return !operator==( rhs );
  60111. }
  60112. #endif
  60113. public:
  60114. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::ePresentTimesInfoGOOGLE;
  60115. const void* pNext = {};
  60116. uint32_t swapchainCount = {};
  60117. const VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE* pTimes = {};
  60118. };
  60119. static_assert( sizeof( PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ), "struct and wrapper have different size!" );
  60120. static_assert( std::is_standard_layout<PresentTimesInfoGOOGLE>::value, "struct wrapper is not a standard layout!" );
  60121. template <>
  60122. struct CppType<StructureType, StructureType::ePresentTimesInfoGOOGLE>
  60123. {
  60124. using Type = PresentTimesInfoGOOGLE;
  60125. };
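// Illustrative sketch, not part of the generated header: requesting an earliest present
// time via VK_GOOGLE_display_timing. `presentId` and `targetTimeNs` are hypothetical
// application values; the struct is chained into a hypothetical `presentInfo`.
//
//   vk::PresentTimeGOOGLE presentTime( presentId, targetTimeNs );
//   vk::PresentTimesInfoGOOGLE timesInfo( 1, &presentTime );   // one entry per presented swapchain
//   presentInfo.pNext = &timesInfo;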
  60126. struct ProtectedSubmitInfo
  60127. {
  60128. static const bool allowDuplicate = false;
  60129. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eProtectedSubmitInfo;
  60130. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  60131. VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo(VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ = {}) VULKAN_HPP_NOEXCEPT
  60132. : protectedSubmit( protectedSubmit_ )
  60133. {}
  60134. VULKAN_HPP_CONSTEXPR ProtectedSubmitInfo( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  60135. ProtectedSubmitInfo( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  60136. : ProtectedSubmitInfo( *reinterpret_cast<ProtectedSubmitInfo const *>( &rhs ) )
  60137. {}
  60138. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  60139. VULKAN_HPP_CONSTEXPR_14 ProtectedSubmitInfo & operator=( ProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  60140. ProtectedSubmitInfo & operator=( VkProtectedSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  60141. {
  60142. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo const *>( &rhs );
  60143. return *this;
  60144. }
  60145. ProtectedSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  60146. {
  60147. pNext = pNext_;
  60148. return *this;
  60149. }
  60150. ProtectedSubmitInfo & setProtectedSubmit( VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit_ ) VULKAN_HPP_NOEXCEPT
  60151. {
  60152. protectedSubmit = protectedSubmit_;
  60153. return *this;
  60154. }
  60155. operator VkProtectedSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
  60156. {
  60157. return *reinterpret_cast<const VkProtectedSubmitInfo*>( this );
  60158. }
  60159. operator VkProtectedSubmitInfo &() VULKAN_HPP_NOEXCEPT
  60160. {
  60161. return *reinterpret_cast<VkProtectedSubmitInfo*>( this );
  60162. }
  60163. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  60164. auto operator<=>( ProtectedSubmitInfo const& ) const = default;
  60165. #else
  60166. bool operator==( ProtectedSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  60167. {
  60168. return ( sType == rhs.sType )
  60169. && ( pNext == rhs.pNext )
  60170. && ( protectedSubmit == rhs.protectedSubmit );
  60171. }
  60172. bool operator!=( ProtectedSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  60173. {
  60174. return !operator==( rhs );
  60175. }
  60176. #endif
  60177. public:
  60178. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eProtectedSubmitInfo;
  60179. const void* pNext = {};
  60180. VULKAN_HPP_NAMESPACE::Bool32 protectedSubmit = {};
  60181. };
  60182. static_assert( sizeof( ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!" );
  60183. static_assert( std::is_standard_layout<ProtectedSubmitInfo>::value, "struct wrapper is not a standard layout!" );
  60184. template <>
  60185. struct CppType<StructureType, StructureType::eProtectedSubmitInfo>
  60186. {
  60187. using Type = ProtectedSubmitInfo;
  60188. };
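// Illustrative sketch, not part of the generated header: marking a queue submission as
// protected. Assumes `submitInfo` is a hypothetical vk::SubmitInfo that will be submitted
// to a protected-capable queue.
//
//   vk::ProtectedSubmitInfo protectedSubmit( VK_TRUE );
//   submitInfo.pNext = &protectedSubmit;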
  60189. struct QueryPoolPerformanceQueryCreateInfoINTEL
  60190. {
  60191. static const bool allowDuplicate = false;
  60192. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;
  60193. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  60194. VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL(VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual) VULKAN_HPP_NOEXCEPT
  60195. : performanceCountersSampling( performanceCountersSampling_ )
  60196. {}
  60197. VULKAN_HPP_CONSTEXPR QueryPoolPerformanceQueryCreateInfoINTEL( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  60198. QueryPoolPerformanceQueryCreateInfoINTEL( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
  60199. : QueryPoolPerformanceQueryCreateInfoINTEL( *reinterpret_cast<QueryPoolPerformanceQueryCreateInfoINTEL const *>( &rhs ) )
  60200. {}
  60201. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  60202. VULKAN_HPP_CONSTEXPR_14 QueryPoolPerformanceQueryCreateInfoINTEL & operator=( QueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  60203. QueryPoolPerformanceQueryCreateInfoINTEL & operator=( VkQueryPoolPerformanceQueryCreateInfoINTEL const & rhs ) VULKAN_HPP_NOEXCEPT
  60204. {
  60205. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL const *>( &rhs );
  60206. return *this;
  60207. }
  60208. QueryPoolPerformanceQueryCreateInfoINTEL & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  60209. {
  60210. pNext = pNext_;
  60211. return *this;
  60212. }
  60213. QueryPoolPerformanceQueryCreateInfoINTEL & setPerformanceCountersSampling( VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling_ ) VULKAN_HPP_NOEXCEPT
  60214. {
  60215. performanceCountersSampling = performanceCountersSampling_;
  60216. return *this;
  60217. }
  60218. operator VkQueryPoolPerformanceQueryCreateInfoINTEL const&() const VULKAN_HPP_NOEXCEPT
  60219. {
  60220. return *reinterpret_cast<const VkQueryPoolPerformanceQueryCreateInfoINTEL*>( this );
  60221. }
  60222. operator VkQueryPoolPerformanceQueryCreateInfoINTEL &() VULKAN_HPP_NOEXCEPT
  60223. {
  60224. return *reinterpret_cast<VkQueryPoolPerformanceQueryCreateInfoINTEL*>( this );
  60225. }
  60226. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  60227. auto operator<=>( QueryPoolPerformanceQueryCreateInfoINTEL const& ) const = default;
  60228. #else
  60229. bool operator==( QueryPoolPerformanceQueryCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
  60230. {
  60231. return ( sType == rhs.sType )
  60232. && ( pNext == rhs.pNext )
  60233. && ( performanceCountersSampling == rhs.performanceCountersSampling );
  60234. }
  60235. bool operator!=( QueryPoolPerformanceQueryCreateInfoINTEL const& rhs ) const VULKAN_HPP_NOEXCEPT
  60236. {
  60237. return !operator==( rhs );
  60238. }
  60239. #endif
  60240. public:
  60241. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL;
  60242. const void* pNext = {};
  60243. VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL performanceCountersSampling = VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL::eManual;
  60244. };
  60245. static_assert( sizeof( QueryPoolPerformanceQueryCreateInfoINTEL ) == sizeof( VkQueryPoolPerformanceQueryCreateInfoINTEL ), "struct and wrapper have different size!" );
  60246. static_assert( std::is_standard_layout<QueryPoolPerformanceQueryCreateInfoINTEL>::value, "struct wrapper is not a standard layout!" );
  60247. template <>
  60248. struct CppType<StructureType, StructureType::eQueryPoolPerformanceQueryCreateInfoINTEL>
  60249. {
  60250. using Type = QueryPoolPerformanceQueryCreateInfoINTEL;
  60251. };
  60252. using QueryPoolCreateInfoINTEL = QueryPoolPerformanceQueryCreateInfoINTEL;
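// Illustrative sketch, not part of the generated header: creating a query pool for
// VK_INTEL_performance_query by chaining this struct into vk::QueryPoolCreateInfo.
// The query count is a placeholder.
//
//   vk::QueryPoolPerformanceQueryCreateInfoINTEL perfQueryInfo( vk::QueryPoolSamplingModeINTEL::eManual );
//   vk::QueryPoolCreateInfo poolInfo( {}, vk::QueryType::ePerformanceQueryINTEL, 1 );
//   poolInfo.pNext = &perfQueryInfo;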
  60253. struct QueueFamilyCheckpointPropertiesNV
  60254. {
  60255. static const bool allowDuplicate = false;
  60256. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eQueueFamilyCheckpointPropertiesNV;
  60257. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  60258. VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV(VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask_ = {}) VULKAN_HPP_NOEXCEPT
  60259. : checkpointExecutionStageMask( checkpointExecutionStageMask_ )
  60260. {}
  60261. VULKAN_HPP_CONSTEXPR QueueFamilyCheckpointPropertiesNV( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  60262. QueueFamilyCheckpointPropertiesNV( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  60263. : QueueFamilyCheckpointPropertiesNV( *reinterpret_cast<QueueFamilyCheckpointPropertiesNV const *>( &rhs ) )
  60264. {}
  60265. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  60266. VULKAN_HPP_CONSTEXPR_14 QueueFamilyCheckpointPropertiesNV & operator=( QueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  60267. QueueFamilyCheckpointPropertiesNV & operator=( VkQueueFamilyCheckpointPropertiesNV const & rhs ) VULKAN_HPP_NOEXCEPT
  60268. {
  60269. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV const *>( &rhs );
  60270. return *this;
  60271. }
  60272. operator VkQueueFamilyCheckpointPropertiesNV const&() const VULKAN_HPP_NOEXCEPT
  60273. {
  60274. return *reinterpret_cast<const VkQueueFamilyCheckpointPropertiesNV*>( this );
  60275. }
  60276. operator VkQueueFamilyCheckpointPropertiesNV &() VULKAN_HPP_NOEXCEPT
  60277. {
  60278. return *reinterpret_cast<VkQueueFamilyCheckpointPropertiesNV*>( this );
  60279. }
  60280. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  60281. auto operator<=>( QueueFamilyCheckpointPropertiesNV const& ) const = default;
  60282. #else
  60283. bool operator==( QueueFamilyCheckpointPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  60284. {
  60285. return ( sType == rhs.sType )
  60286. && ( pNext == rhs.pNext )
  60287. && ( checkpointExecutionStageMask == rhs.checkpointExecutionStageMask );
  60288. }
  60289. bool operator!=( QueueFamilyCheckpointPropertiesNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  60290. {
  60291. return !operator==( rhs );
  60292. }
  60293. #endif
  60294. public:
  60295. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eQueueFamilyCheckpointPropertiesNV;
  60296. void* pNext = {};
  60297. VULKAN_HPP_NAMESPACE::PipelineStageFlags checkpointExecutionStageMask = {};
  60298. };
  60299. static_assert( sizeof( QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ), "struct and wrapper have different size!" );
  60300. static_assert( std::is_standard_layout<QueueFamilyCheckpointPropertiesNV>::value, "struct wrapper is not a standard layout!" );
  60301. template <>
  60302. struct CppType<StructureType, StructureType::eQueueFamilyCheckpointPropertiesNV>
  60303. {
  60304. using Type = QueueFamilyCheckpointPropertiesNV;
  60305. };
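// Illustrative sketch, not part of the generated header: this is a read-only output
// struct for VK_NV_device_diagnostic_checkpoints. It is filled in by chaining it into
// vk::QueueFamilyProperties2 before querying queue family properties.
//
//   vk::QueueFamilyCheckpointPropertiesNV checkpointProps;
//   vk::QueueFamilyProperties2 props2;
//   props2.pNext = &checkpointProps;   // then pass to the queue family properties2 query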
  60306. struct RenderPassAttachmentBeginInfo
  60307. {
  60308. static const bool allowDuplicate = false;
  60309. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassAttachmentBeginInfo;
  60310. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  60311. VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo(uint32_t attachmentCount_ = {}, const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ = {}) VULKAN_HPP_NOEXCEPT
  60312. : attachmentCount( attachmentCount_ ), pAttachments( pAttachments_ )
  60313. {}
  60314. VULKAN_HPP_CONSTEXPR RenderPassAttachmentBeginInfo( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  60315. RenderPassAttachmentBeginInfo( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  60316. : RenderPassAttachmentBeginInfo( *reinterpret_cast<RenderPassAttachmentBeginInfo const *>( &rhs ) )
  60317. {}
  60318. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  60319. RenderPassAttachmentBeginInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ )
  60320. : attachmentCount( static_cast<uint32_t>( attachments_.size() ) ), pAttachments( attachments_.data() )
  60321. {}
  60322. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  60323. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  60324. VULKAN_HPP_CONSTEXPR_14 RenderPassAttachmentBeginInfo & operator=( RenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  60325. RenderPassAttachmentBeginInfo & operator=( VkRenderPassAttachmentBeginInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  60326. {
  60327. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo const *>( &rhs );
  60328. return *this;
  60329. }
  60330. RenderPassAttachmentBeginInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  60331. {
  60332. pNext = pNext_;
  60333. return *this;
  60334. }
  60335. RenderPassAttachmentBeginInfo & setAttachmentCount( uint32_t attachmentCount_ ) VULKAN_HPP_NOEXCEPT
  60336. {
  60337. attachmentCount = attachmentCount_;
  60338. return *this;
  60339. }
  60340. RenderPassAttachmentBeginInfo & setPAttachments( const VULKAN_HPP_NAMESPACE::ImageView* pAttachments_ ) VULKAN_HPP_NOEXCEPT
  60341. {
  60342. pAttachments = pAttachments_;
  60343. return *this;
  60344. }
  60345. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  60346. RenderPassAttachmentBeginInfo & setAttachments( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ImageView> const & attachments_ ) VULKAN_HPP_NOEXCEPT
  60347. {
  60348. attachmentCount = static_cast<uint32_t>( attachments_.size() );
  60349. pAttachments = attachments_.data();
  60350. return *this;
  60351. }
  60352. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  60353. operator VkRenderPassAttachmentBeginInfo const&() const VULKAN_HPP_NOEXCEPT
  60354. {
  60355. return *reinterpret_cast<const VkRenderPassAttachmentBeginInfo*>( this );
  60356. }
  60357. operator VkRenderPassAttachmentBeginInfo &() VULKAN_HPP_NOEXCEPT
  60358. {
  60359. return *reinterpret_cast<VkRenderPassAttachmentBeginInfo*>( this );
  60360. }
  60361. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  60362. auto operator<=>( RenderPassAttachmentBeginInfo const& ) const = default;
  60363. #else
  60364. bool operator==( RenderPassAttachmentBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  60365. {
  60366. return ( sType == rhs.sType )
  60367. && ( pNext == rhs.pNext )
  60368. && ( attachmentCount == rhs.attachmentCount )
  60369. && ( pAttachments == rhs.pAttachments );
  60370. }
  60371. bool operator!=( RenderPassAttachmentBeginInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  60372. {
  60373. return !operator==( rhs );
  60374. }
  60375. #endif
  60376. public:
  60377. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassAttachmentBeginInfo;
  60378. const void* pNext = {};
  60379. uint32_t attachmentCount = {};
  60380. const VULKAN_HPP_NAMESPACE::ImageView* pAttachments = {};
  60381. };
  60382. static_assert( sizeof( RenderPassAttachmentBeginInfo ) == sizeof( VkRenderPassAttachmentBeginInfo ), "struct and wrapper have different size!" );
  60383. static_assert( std::is_standard_layout<RenderPassAttachmentBeginInfo>::value, "struct wrapper is not a standard layout!" );
  60384. template <>
  60385. struct CppType<StructureType, StructureType::eRenderPassAttachmentBeginInfo>
  60386. {
  60387. using Type = RenderPassAttachmentBeginInfo;
  60388. };
  60389. using RenderPassAttachmentBeginInfoKHR = RenderPassAttachmentBeginInfo;
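// Illustrative sketch, not part of the generated header: binding concrete image views at
// render pass begin time for a framebuffer created with
// vk::FramebufferCreateFlagBits::eImageless. `swapchainImageView` and
// `renderPassBeginInfo` are hypothetical.
//
//   std::array<vk::ImageView, 1> attachments = { swapchainImageView };
//   vk::RenderPassAttachmentBeginInfo attachmentBegin( attachments );
//   renderPassBeginInfo.pNext = &attachmentBegin;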
  60390. struct RenderPassFragmentDensityMapCreateInfoEXT
  60391. {
  60392. static const bool allowDuplicate = false;
  60393. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
  60394. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  60395. VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT(VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment_ = {}) VULKAN_HPP_NOEXCEPT
  60396. : fragmentDensityMapAttachment( fragmentDensityMapAttachment_ )
  60397. {}
  60398. VULKAN_HPP_CONSTEXPR RenderPassFragmentDensityMapCreateInfoEXT( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  60399. RenderPassFragmentDensityMapCreateInfoEXT( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  60400. : RenderPassFragmentDensityMapCreateInfoEXT( *reinterpret_cast<RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs ) )
  60401. {}
  60402. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  60403. VULKAN_HPP_CONSTEXPR_14 RenderPassFragmentDensityMapCreateInfoEXT & operator=( RenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  60404. RenderPassFragmentDensityMapCreateInfoEXT & operator=( VkRenderPassFragmentDensityMapCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  60405. {
  60406. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT const *>( &rhs );
  60407. return *this;
  60408. }
  60409. RenderPassFragmentDensityMapCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  60410. {
  60411. pNext = pNext_;
  60412. return *this;
  60413. }
  60414. RenderPassFragmentDensityMapCreateInfoEXT & setFragmentDensityMapAttachment( VULKAN_HPP_NAMESPACE::AttachmentReference const & fragmentDensityMapAttachment_ ) VULKAN_HPP_NOEXCEPT
  60415. {
  60416. fragmentDensityMapAttachment = fragmentDensityMapAttachment_;
  60417. return *this;
  60418. }
  60419. operator VkRenderPassFragmentDensityMapCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  60420. {
  60421. return *reinterpret_cast<const VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
  60422. }
  60423. operator VkRenderPassFragmentDensityMapCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  60424. {
  60425. return *reinterpret_cast<VkRenderPassFragmentDensityMapCreateInfoEXT*>( this );
  60426. }
  60427. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  60428. auto operator<=>( RenderPassFragmentDensityMapCreateInfoEXT const& ) const = default;
  60429. #else
  60430. bool operator==( RenderPassFragmentDensityMapCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  60431. {
  60432. return ( sType == rhs.sType )
  60433. && ( pNext == rhs.pNext )
  60434. && ( fragmentDensityMapAttachment == rhs.fragmentDensityMapAttachment );
  60435. }
  60436. bool operator!=( RenderPassFragmentDensityMapCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  60437. {
  60438. return !operator==( rhs );
  60439. }
  60440. #endif
  60441. public:
  60442. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassFragmentDensityMapCreateInfoEXT;
  60443. const void* pNext = {};
  60444. VULKAN_HPP_NAMESPACE::AttachmentReference fragmentDensityMapAttachment = {};
  60445. };
  60446. static_assert( sizeof( RenderPassFragmentDensityMapCreateInfoEXT ) == sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ), "struct and wrapper have different size!" );
  60447. static_assert( std::is_standard_layout<RenderPassFragmentDensityMapCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  60448. template <>
  60449. struct CppType<StructureType, StructureType::eRenderPassFragmentDensityMapCreateInfoEXT>
  60450. {
  60451. using Type = RenderPassFragmentDensityMapCreateInfoEXT;
  60452. };
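// Illustrative sketch, not part of the generated header: declaring attachment 1 of a
// hypothetical `renderPassCreateInfo` as the fragment density map for
// VK_EXT_fragment_density_map.
//
//   vk::AttachmentReference densityMapRef( 1, vk::ImageLayout::eFragmentDensityMapOptimalEXT );
//   vk::RenderPassFragmentDensityMapCreateInfoEXT densityMapInfo( densityMapRef );
//   renderPassCreateInfo.pNext = &densityMapInfo;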
  60453. struct RenderPassInputAttachmentAspectCreateInfo
  60454. {
  60455. static const bool allowDuplicate = false;
  60456. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
  60457. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  60458. VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo(uint32_t aspectReferenceCount_ = {}, const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences_ = {}) VULKAN_HPP_NOEXCEPT
  60459. : aspectReferenceCount( aspectReferenceCount_ ), pAspectReferences( pAspectReferences_ )
  60460. {}
  60461. VULKAN_HPP_CONSTEXPR RenderPassInputAttachmentAspectCreateInfo( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  60462. RenderPassInputAttachmentAspectCreateInfo( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  60463. : RenderPassInputAttachmentAspectCreateInfo( *reinterpret_cast<RenderPassInputAttachmentAspectCreateInfo const *>( &rhs ) )
  60464. {}
  60465. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  60466. RenderPassInputAttachmentAspectCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const & aspectReferences_ )
  60467. : aspectReferenceCount( static_cast<uint32_t>( aspectReferences_.size() ) ), pAspectReferences( aspectReferences_.data() )
  60468. {}
  60469. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  60470. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  60471. VULKAN_HPP_CONSTEXPR_14 RenderPassInputAttachmentAspectCreateInfo & operator=( RenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  60472. RenderPassInputAttachmentAspectCreateInfo & operator=( VkRenderPassInputAttachmentAspectCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  60473. {
  60474. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo const *>( &rhs );
  60475. return *this;
  60476. }
  60477. RenderPassInputAttachmentAspectCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  60478. {
  60479. pNext = pNext_;
  60480. return *this;
  60481. }
  60482. RenderPassInputAttachmentAspectCreateInfo & setAspectReferenceCount( uint32_t aspectReferenceCount_ ) VULKAN_HPP_NOEXCEPT
  60483. {
  60484. aspectReferenceCount = aspectReferenceCount_;
  60485. return *this;
  60486. }
  60487. RenderPassInputAttachmentAspectCreateInfo & setPAspectReferences( const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences_ ) VULKAN_HPP_NOEXCEPT
  60488. {
  60489. pAspectReferences = pAspectReferences_;
  60490. return *this;
  60491. }
  60492. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  60493. RenderPassInputAttachmentAspectCreateInfo & setAspectReferences( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference> const & aspectReferences_ ) VULKAN_HPP_NOEXCEPT
  60494. {
  60495. aspectReferenceCount = static_cast<uint32_t>( aspectReferences_.size() );
  60496. pAspectReferences = aspectReferences_.data();
  60497. return *this;
  60498. }
  60499. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  60500. operator VkRenderPassInputAttachmentAspectCreateInfo const&() const VULKAN_HPP_NOEXCEPT
  60501. {
  60502. return *reinterpret_cast<const VkRenderPassInputAttachmentAspectCreateInfo*>( this );
  60503. }
  60504. operator VkRenderPassInputAttachmentAspectCreateInfo &() VULKAN_HPP_NOEXCEPT
  60505. {
  60506. return *reinterpret_cast<VkRenderPassInputAttachmentAspectCreateInfo*>( this );
  60507. }
  60508. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  60509. auto operator<=>( RenderPassInputAttachmentAspectCreateInfo const& ) const = default;
  60510. #else
  60511. bool operator==( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  60512. {
  60513. return ( sType == rhs.sType )
  60514. && ( pNext == rhs.pNext )
  60515. && ( aspectReferenceCount == rhs.aspectReferenceCount )
  60516. && ( pAspectReferences == rhs.pAspectReferences );
  60517. }
  60518. bool operator!=( RenderPassInputAttachmentAspectCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  60519. {
  60520. return !operator==( rhs );
  60521. }
  60522. #endif
  60523. public:
  60524. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassInputAttachmentAspectCreateInfo;
  60525. const void* pNext = {};
  60526. uint32_t aspectReferenceCount = {};
  60527. const VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference* pAspectReferences = {};
  60528. };
  60529. static_assert( sizeof( RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ), "struct and wrapper have different size!" );
  60530. static_assert( std::is_standard_layout<RenderPassInputAttachmentAspectCreateInfo>::value, "struct wrapper is not a standard layout!" );
  60531. template <>
  60532. struct CppType<StructureType, StructureType::eRenderPassInputAttachmentAspectCreateInfo>
  60533. {
  60534. using Type = RenderPassInputAttachmentAspectCreateInfo;
  60535. };
  60536. using RenderPassInputAttachmentAspectCreateInfoKHR = RenderPassInputAttachmentAspectCreateInfo;
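// Illustrative sketch, not part of the generated header: restricting input attachment 0
// of subpass 0 to the color aspect by chaining this struct into a hypothetical
// `renderPassCreateInfo`.
//
//   vk::InputAttachmentAspectReference aspectRef( 0, 0, vk::ImageAspectFlagBits::eColor );
//   vk::RenderPassInputAttachmentAspectCreateInfo aspectInfo( 1, &aspectRef );
//   renderPassCreateInfo.pNext = &aspectInfo;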
  60537. struct RenderPassMultiviewCreateInfo
  60538. {
  60539. static const bool allowDuplicate = false;
  60540. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassMultiviewCreateInfo;
  60541. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  60542. VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo(uint32_t subpassCount_ = {}, const uint32_t* pViewMasks_ = {}, uint32_t dependencyCount_ = {}, const int32_t* pViewOffsets_ = {}, uint32_t correlationMaskCount_ = {}, const uint32_t* pCorrelationMasks_ = {}) VULKAN_HPP_NOEXCEPT
  60543. : subpassCount( subpassCount_ ), pViewMasks( pViewMasks_ ), dependencyCount( dependencyCount_ ), pViewOffsets( pViewOffsets_ ), correlationMaskCount( correlationMaskCount_ ), pCorrelationMasks( pCorrelationMasks_ )
  60544. {}
  60545. VULKAN_HPP_CONSTEXPR RenderPassMultiviewCreateInfo( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  60546. RenderPassMultiviewCreateInfo( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  60547. : RenderPassMultiviewCreateInfo( *reinterpret_cast<RenderPassMultiviewCreateInfo const *>( &rhs ) )
  60548. {}
  60549. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  60550. RenderPassMultiviewCreateInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ = {} )
  60551. : subpassCount( static_cast<uint32_t>( viewMasks_.size() ) ), pViewMasks( viewMasks_.data() ), dependencyCount( static_cast<uint32_t>( viewOffsets_.size() ) ), pViewOffsets( viewOffsets_.data() ), correlationMaskCount( static_cast<uint32_t>( correlationMasks_.size() ) ), pCorrelationMasks( correlationMasks_.data() )
  60552. {}
  60553. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  60554. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 RenderPassMultiviewCreateInfo & operator=( RenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    RenderPassMultiviewCreateInfo & operator=( VkRenderPassMultiviewCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo const *>( &rhs );
      return *this;
    }
    RenderPassMultiviewCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    RenderPassMultiviewCreateInfo & setSubpassCount( uint32_t subpassCount_ ) VULKAN_HPP_NOEXCEPT
    {
      subpassCount = subpassCount_;
      return *this;
    }
    RenderPassMultiviewCreateInfo & setPViewMasks( const uint32_t* pViewMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewMasks = pViewMasks_;
      return *this;
    }
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    RenderPassMultiviewCreateInfo & setViewMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & viewMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      subpassCount = static_cast<uint32_t>( viewMasks_.size() );
      pViewMasks = viewMasks_.data();
      return *this;
    }
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    RenderPassMultiviewCreateInfo & setDependencyCount( uint32_t dependencyCount_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyCount = dependencyCount_;
      return *this;
    }
    RenderPassMultiviewCreateInfo & setPViewOffsets( const int32_t* pViewOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      pViewOffsets = pViewOffsets_;
      return *this;
    }
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    RenderPassMultiviewCreateInfo & setViewOffsets( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const int32_t> const & viewOffsets_ ) VULKAN_HPP_NOEXCEPT
    {
      dependencyCount = static_cast<uint32_t>( viewOffsets_.size() );
      pViewOffsets = viewOffsets_.data();
      return *this;
    }
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    RenderPassMultiviewCreateInfo & setCorrelationMaskCount( uint32_t correlationMaskCount_ ) VULKAN_HPP_NOEXCEPT
    {
      correlationMaskCount = correlationMaskCount_;
      return *this;
    }
    RenderPassMultiviewCreateInfo & setPCorrelationMasks( const uint32_t* pCorrelationMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      pCorrelationMasks = pCorrelationMasks_;
      return *this;
    }
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    RenderPassMultiviewCreateInfo & setCorrelationMasks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & correlationMasks_ ) VULKAN_HPP_NOEXCEPT
    {
      correlationMaskCount = static_cast<uint32_t>( correlationMasks_.size() );
      pCorrelationMasks = correlationMasks_.data();
      return *this;
    }
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    operator VkRenderPassMultiviewCreateInfo const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassMultiviewCreateInfo*>( this );
    }
    operator VkRenderPassMultiviewCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassMultiviewCreateInfo*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( RenderPassMultiviewCreateInfo const& ) const = default;
#else
    bool operator==( RenderPassMultiviewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( subpassCount == rhs.subpassCount )
          && ( pViewMasks == rhs.pViewMasks )
          && ( dependencyCount == rhs.dependencyCount )
          && ( pViewOffsets == rhs.pViewOffsets )
          && ( correlationMaskCount == rhs.correlationMaskCount )
          && ( pCorrelationMasks == rhs.pCorrelationMasks );
    }
    bool operator!=( RenderPassMultiviewCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassMultiviewCreateInfo;
    const void* pNext = {};
    uint32_t subpassCount = {};
    const uint32_t* pViewMasks = {};
    uint32_t dependencyCount = {};
    const int32_t* pViewOffsets = {};
    uint32_t correlationMaskCount = {};
    const uint32_t* pCorrelationMasks = {};
  };
  static_assert( sizeof( RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<RenderPassMultiviewCreateInfo>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::eRenderPassMultiviewCreateInfo>
  {
    using Type = RenderPassMultiviewCreateInfo;
  };
  using RenderPassMultiviewCreateInfoKHR = RenderPassMultiviewCreateInfo;
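  // Illustrative usage sketch (not generated from the Vulkan registry): chaining a
  // RenderPassMultiviewCreateInfo into render pass creation via pNext. The mask values
  // and variable names below are assumptions for the example only; "vk" is the default
  // VULKAN_HPP_NAMESPACE.
  //
  //   uint32_t viewMasks[]        = { 0x3 };   // subpass 0 renders to views 0 and 1
  //   uint32_t correlationMasks[] = { 0x3 };
  //   vk::RenderPassMultiviewCreateInfo multiviewInfo;
  //   multiviewInfo.setViewMasks( viewMasks )               // also sets subpassCount
  //                .setCorrelationMasks( correlationMasks ) // also sets correlationMaskCount
  //                .setDependencyCount( 0 );
  //   vk::RenderPassCreateInfo renderPassInfo;
  //   renderPassInfo.setPNext( &multiviewInfo );
  //   // ... fill attachments/subpasses, then create the render pass as usual.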
  struct SubpassSampleLocationsEXT
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT(uint32_t subpassIndex_ = {}, VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo_ = {}) VULKAN_HPP_NOEXCEPT
      : subpassIndex( subpassIndex_ ), sampleLocationsInfo( sampleLocationsInfo_ )
    {}
    VULKAN_HPP_CONSTEXPR SubpassSampleLocationsEXT( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    SubpassSampleLocationsEXT( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SubpassSampleLocationsEXT( *reinterpret_cast<SubpassSampleLocationsEXT const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 SubpassSampleLocationsEXT & operator=( SubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    SubpassSampleLocationsEXT & operator=( VkSubpassSampleLocationsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT const *>( &rhs );
      return *this;
    }
    SubpassSampleLocationsEXT & setSubpassIndex( uint32_t subpassIndex_ ) VULKAN_HPP_NOEXCEPT
    {
      subpassIndex = subpassIndex_;
      return *this;
    }
    SubpassSampleLocationsEXT & setSampleLocationsInfo( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT const & sampleLocationsInfo_ ) VULKAN_HPP_NOEXCEPT
    {
      sampleLocationsInfo = sampleLocationsInfo_;
      return *this;
    }
    operator VkSubpassSampleLocationsEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSubpassSampleLocationsEXT*>( this );
    }
    operator VkSubpassSampleLocationsEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSubpassSampleLocationsEXT*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SubpassSampleLocationsEXT const& ) const = default;
#else
    bool operator==( SubpassSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( subpassIndex == rhs.subpassIndex )
          && ( sampleLocationsInfo == rhs.sampleLocationsInfo );
    }
    bool operator!=( SubpassSampleLocationsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    uint32_t subpassIndex = {};
    VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT sampleLocationsInfo = {};
  };
  static_assert( sizeof( SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SubpassSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
  struct RenderPassSampleLocationsBeginInfoEXT
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT(uint32_t attachmentInitialSampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ = {}, uint32_t postSubpassSampleLocationsCount_ = {}, const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ = {}) VULKAN_HPP_NOEXCEPT
      : attachmentInitialSampleLocationsCount( attachmentInitialSampleLocationsCount_ ), pAttachmentInitialSampleLocations( pAttachmentInitialSampleLocations_ ), postSubpassSampleLocationsCount( postSubpassSampleLocationsCount_ ), pPostSubpassSampleLocations( pPostSubpassSampleLocations_ )
    {}
    VULKAN_HPP_CONSTEXPR RenderPassSampleLocationsBeginInfoEXT( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    RenderPassSampleLocationsBeginInfoEXT( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassSampleLocationsBeginInfoEXT( *reinterpret_cast<RenderPassSampleLocationsBeginInfoEXT const *>( &rhs ) )
    {}
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    RenderPassSampleLocationsBeginInfoEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const & attachmentInitialSampleLocations_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const & postSubpassSampleLocations_ = {} )
      : attachmentInitialSampleLocationsCount( static_cast<uint32_t>( attachmentInitialSampleLocations_.size() ) ), pAttachmentInitialSampleLocations( attachmentInitialSampleLocations_.data() ), postSubpassSampleLocationsCount( static_cast<uint32_t>( postSubpassSampleLocations_.size() ) ), pPostSubpassSampleLocations( postSubpassSampleLocations_.data() )
    {}
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 RenderPassSampleLocationsBeginInfoEXT & operator=( RenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    RenderPassSampleLocationsBeginInfoEXT & operator=( VkRenderPassSampleLocationsBeginInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT const *>( &rhs );
      return *this;
    }
    RenderPassSampleLocationsBeginInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocationsCount( uint32_t attachmentInitialSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentInitialSampleLocationsCount = attachmentInitialSampleLocationsCount_;
      return *this;
    }
    RenderPassSampleLocationsBeginInfoEXT & setPAttachmentInitialSampleLocations( const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      pAttachmentInitialSampleLocations = pAttachmentInitialSampleLocations_;
      return *this;
    }
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    RenderPassSampleLocationsBeginInfoEXT & setAttachmentInitialSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT> const & attachmentInitialSampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      attachmentInitialSampleLocationsCount = static_cast<uint32_t>( attachmentInitialSampleLocations_.size() );
      pAttachmentInitialSampleLocations = attachmentInitialSampleLocations_.data();
      return *this;
    }
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocationsCount( uint32_t postSubpassSampleLocationsCount_ ) VULKAN_HPP_NOEXCEPT
    {
      postSubpassSampleLocationsCount = postSubpassSampleLocationsCount_;
      return *this;
    }
    RenderPassSampleLocationsBeginInfoEXT & setPPostSubpassSampleLocations( const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      pPostSubpassSampleLocations = pPostSubpassSampleLocations_;
      return *this;
    }
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    RenderPassSampleLocationsBeginInfoEXT & setPostSubpassSampleLocations( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT> const & postSubpassSampleLocations_ ) VULKAN_HPP_NOEXCEPT
    {
      postSubpassSampleLocationsCount = static_cast<uint32_t>( postSubpassSampleLocations_.size() );
      pPostSubpassSampleLocations = postSubpassSampleLocations_.data();
      return *this;
    }
#endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
    operator VkRenderPassSampleLocationsBeginInfoEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassSampleLocationsBeginInfoEXT*>( this );
    }
    operator VkRenderPassSampleLocationsBeginInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassSampleLocationsBeginInfoEXT*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( RenderPassSampleLocationsBeginInfoEXT const& ) const = default;
#else
    bool operator==( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( attachmentInitialSampleLocationsCount == rhs.attachmentInitialSampleLocationsCount )
          && ( pAttachmentInitialSampleLocations == rhs.pAttachmentInitialSampleLocations )
          && ( postSubpassSampleLocationsCount == rhs.postSubpassSampleLocationsCount )
          && ( pPostSubpassSampleLocations == rhs.pPostSubpassSampleLocations );
    }
    bool operator!=( RenderPassSampleLocationsBeginInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassSampleLocationsBeginInfoEXT;
    const void* pNext = {};
    uint32_t attachmentInitialSampleLocationsCount = {};
    const VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations = {};
    uint32_t postSubpassSampleLocationsCount = {};
    const VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT* pPostSubpassSampleLocations = {};
  };
  static_assert( sizeof( RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<RenderPassSampleLocationsBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::eRenderPassSampleLocationsBeginInfoEXT>
  {
    using Type = RenderPassSampleLocationsBeginInfoEXT;
  };
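  // Illustrative usage sketch (assumptions: VK_EXT_sample_locations is enabled; the names
  // and values below are example-only): attaching per-subpass sample locations to a
  // RenderPassBeginInfo through its pNext chain. "vk" is the default VULKAN_HPP_NAMESPACE.
  //
  //   vk::SampleLocationsInfoEXT locationsInfo;                  // filled in elsewhere
  //   vk::SubpassSampleLocationsEXT subpassLocations( 0, locationsInfo );
  //   vk::RenderPassSampleLocationsBeginInfoEXT sampleLocationsBeginInfo;
  //   sampleLocationsBeginInfo.setPostSubpassSampleLocations( subpassLocations );
  //   vk::RenderPassBeginInfo beginInfo;
  //   beginInfo.setPNext( &sampleLocationsBeginInfo );
  //   // commandBuffer.beginRenderPass( beginInfo, vk::SubpassContents::eInline );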
  struct RenderPassTransformBeginInfoQCOM
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eRenderPassTransformBeginInfoQCOM;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM(VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity) VULKAN_HPP_NOEXCEPT
      : transform( transform_ )
    {}
    VULKAN_HPP_CONSTEXPR RenderPassTransformBeginInfoQCOM( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    RenderPassTransformBeginInfoQCOM( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
      : RenderPassTransformBeginInfoQCOM( *reinterpret_cast<RenderPassTransformBeginInfoQCOM const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 RenderPassTransformBeginInfoQCOM & operator=( RenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    RenderPassTransformBeginInfoQCOM & operator=( VkRenderPassTransformBeginInfoQCOM const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM const *>( &rhs );
      return *this;
    }
    RenderPassTransformBeginInfoQCOM & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    RenderPassTransformBeginInfoQCOM & setTransform( VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform_ ) VULKAN_HPP_NOEXCEPT
    {
      transform = transform_;
      return *this;
    }
    operator VkRenderPassTransformBeginInfoQCOM const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkRenderPassTransformBeginInfoQCOM*>( this );
    }
    operator VkRenderPassTransformBeginInfoQCOM &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkRenderPassTransformBeginInfoQCOM*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( RenderPassTransformBeginInfoQCOM const& ) const = default;
#else
    bool operator==( RenderPassTransformBeginInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( transform == rhs.transform );
    }
    bool operator!=( RenderPassTransformBeginInfoQCOM const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eRenderPassTransformBeginInfoQCOM;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR transform = VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR::eIdentity;
  };
  static_assert( sizeof( RenderPassTransformBeginInfoQCOM ) == sizeof( VkRenderPassTransformBeginInfoQCOM ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<RenderPassTransformBeginInfoQCOM>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::eRenderPassTransformBeginInfoQCOM>
  {
    using Type = RenderPassTransformBeginInfoQCOM;
  };
  struct SamplerCustomBorderColorCreateInfoEXT
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    SamplerCustomBorderColorCreateInfoEXT(VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor_ = {}, VULKAN_HPP_NAMESPACE::Format format_ = VULKAN_HPP_NAMESPACE::Format::eUndefined) VULKAN_HPP_NOEXCEPT
      : customBorderColor( customBorderColor_ ), format( format_ )
    {}
    SamplerCustomBorderColorCreateInfoEXT( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    SamplerCustomBorderColorCreateInfoEXT( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerCustomBorderColorCreateInfoEXT( *reinterpret_cast<SamplerCustomBorderColorCreateInfoEXT const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    SamplerCustomBorderColorCreateInfoEXT & operator=( SamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    SamplerCustomBorderColorCreateInfoEXT & operator=( VkSamplerCustomBorderColorCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT const *>( &rhs );
      return *this;
    }
    SamplerCustomBorderColorCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    SamplerCustomBorderColorCreateInfoEXT & setCustomBorderColor( VULKAN_HPP_NAMESPACE::ClearColorValue const & customBorderColor_ ) VULKAN_HPP_NOEXCEPT
    {
      customBorderColor = customBorderColor_;
      return *this;
    }
    SamplerCustomBorderColorCreateInfoEXT & setFormat( VULKAN_HPP_NAMESPACE::Format format_ ) VULKAN_HPP_NOEXCEPT
    {
      format = format_;
      return *this;
    }
    operator VkSamplerCustomBorderColorCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerCustomBorderColorCreateInfoEXT*>( this );
    }
    operator VkSamplerCustomBorderColorCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerCustomBorderColorCreateInfoEXT*>( this );
    }
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerCustomBorderColorCreateInfoEXT;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::ClearColorValue customBorderColor = {};
    VULKAN_HPP_NAMESPACE::Format format = VULKAN_HPP_NAMESPACE::Format::eUndefined;
  };
  static_assert( sizeof( SamplerCustomBorderColorCreateInfoEXT ) == sizeof( VkSamplerCustomBorderColorCreateInfoEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SamplerCustomBorderColorCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::eSamplerCustomBorderColorCreateInfoEXT>
  {
    using Type = SamplerCustomBorderColorCreateInfoEXT;
  };
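  // Illustrative usage sketch (assumptions: VK_EXT_custom_border_color is enabled; the
  // color, format, and variable names are example-only; "vk" is the default
  // VULKAN_HPP_NAMESPACE): selecting a custom border color for a sampler via pNext.
  //
  //   vk::SamplerCustomBorderColorCreateInfoEXT customBorderColorInfo;
  //   customBorderColorInfo.setCustomBorderColor( std::array<float, 4>{ 1.0f, 0.0f, 0.0f, 1.0f } )
  //                        .setFormat( vk::Format::eR8G8B8A8Unorm );
  //   vk::SamplerCreateInfo samplerInfo;
  //   samplerInfo.setPNext( &customBorderColorInfo )
  //              .setBorderColor( vk::BorderColor::eFloatCustomEXT );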
  struct SamplerReductionModeCreateInfo
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerReductionModeCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo(VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage) VULKAN_HPP_NOEXCEPT
      : reductionMode( reductionMode_ )
    {}
    VULKAN_HPP_CONSTEXPR SamplerReductionModeCreateInfo( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    SamplerReductionModeCreateInfo( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerReductionModeCreateInfo( *reinterpret_cast<SamplerReductionModeCreateInfo const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 SamplerReductionModeCreateInfo & operator=( SamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    SamplerReductionModeCreateInfo & operator=( VkSamplerReductionModeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo const *>( &rhs );
      return *this;
    }
    SamplerReductionModeCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    SamplerReductionModeCreateInfo & setReductionMode( VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode_ ) VULKAN_HPP_NOEXCEPT
    {
      reductionMode = reductionMode_;
      return *this;
    }
    operator VkSamplerReductionModeCreateInfo const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerReductionModeCreateInfo*>( this );
    }
    operator VkSamplerReductionModeCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerReductionModeCreateInfo*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SamplerReductionModeCreateInfo const& ) const = default;
#else
    bool operator==( SamplerReductionModeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( reductionMode == rhs.reductionMode );
    }
    bool operator!=( SamplerReductionModeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerReductionModeCreateInfo;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::SamplerReductionMode reductionMode = VULKAN_HPP_NAMESPACE::SamplerReductionMode::eWeightedAverage;
  };
  static_assert( sizeof( SamplerReductionModeCreateInfo ) == sizeof( VkSamplerReductionModeCreateInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SamplerReductionModeCreateInfo>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::eSamplerReductionModeCreateInfo>
  {
    using Type = SamplerReductionModeCreateInfo;
  };
  using SamplerReductionModeCreateInfoEXT = SamplerReductionModeCreateInfo;
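  // Illustrative usage sketch (the reduction mode chosen and the variable names are
  // example-only assumptions; "vk" is the default VULKAN_HPP_NAMESPACE): requesting a
  // min/max reduction sampler by chaining SamplerReductionModeCreateInfo into
  // SamplerCreateInfo.
  //
  //   vk::SamplerReductionModeCreateInfo reductionInfo( vk::SamplerReductionMode::eMax );
  //   vk::SamplerCreateInfo samplerInfo;
  //   samplerInfo.setPNext( &reductionInfo );
  //   // vk::UniqueSampler sampler = device.createSamplerUnique( samplerInfo );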
  struct SamplerYcbcrConversionImageFormatProperties
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties(uint32_t combinedImageSamplerDescriptorCount_ = {}) VULKAN_HPP_NOEXCEPT
      : combinedImageSamplerDescriptorCount( combinedImageSamplerDescriptorCount_ )
    {}
    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionImageFormatProperties( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    SamplerYcbcrConversionImageFormatProperties( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerYcbcrConversionImageFormatProperties( *reinterpret_cast<SamplerYcbcrConversionImageFormatProperties const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionImageFormatProperties & operator=( SamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    SamplerYcbcrConversionImageFormatProperties & operator=( VkSamplerYcbcrConversionImageFormatProperties const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties const *>( &rhs );
      return *this;
    }
    operator VkSamplerYcbcrConversionImageFormatProperties const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerYcbcrConversionImageFormatProperties*>( this );
    }
    operator VkSamplerYcbcrConversionImageFormatProperties &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerYcbcrConversionImageFormatProperties*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SamplerYcbcrConversionImageFormatProperties const& ) const = default;
#else
    bool operator==( SamplerYcbcrConversionImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( combinedImageSamplerDescriptorCount == rhs.combinedImageSamplerDescriptorCount );
    }
    bool operator!=( SamplerYcbcrConversionImageFormatProperties const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionImageFormatProperties;
    void* pNext = {};
    uint32_t combinedImageSamplerDescriptorCount = {};
  };
  static_assert( sizeof( SamplerYcbcrConversionImageFormatProperties ) == sizeof( VkSamplerYcbcrConversionImageFormatProperties ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SamplerYcbcrConversionImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionImageFormatProperties>
  {
    using Type = SamplerYcbcrConversionImageFormatProperties;
  };
  using SamplerYcbcrConversionImageFormatPropertiesKHR = SamplerYcbcrConversionImageFormatProperties;
  struct SamplerYcbcrConversionInfo
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSamplerYcbcrConversionInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ = {}) VULKAN_HPP_NOEXCEPT
      : conversion( conversion_ )
    {}
    VULKAN_HPP_CONSTEXPR SamplerYcbcrConversionInfo( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    SamplerYcbcrConversionInfo( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SamplerYcbcrConversionInfo( *reinterpret_cast<SamplerYcbcrConversionInfo const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 SamplerYcbcrConversionInfo & operator=( SamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    SamplerYcbcrConversionInfo & operator=( VkSamplerYcbcrConversionInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo const *>( &rhs );
      return *this;
    }
    SamplerYcbcrConversionInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    SamplerYcbcrConversionInfo & setConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion_ ) VULKAN_HPP_NOEXCEPT
    {
      conversion = conversion_;
      return *this;
    }
    operator VkSamplerYcbcrConversionInfo const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSamplerYcbcrConversionInfo*>( this );
    }
    operator VkSamplerYcbcrConversionInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSamplerYcbcrConversionInfo*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SamplerYcbcrConversionInfo const& ) const = default;
#else
    bool operator==( SamplerYcbcrConversionInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( conversion == rhs.conversion );
    }
    bool operator!=( SamplerYcbcrConversionInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSamplerYcbcrConversionInfo;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion conversion = {};
  };
  static_assert( sizeof( SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SamplerYcbcrConversionInfo>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::eSamplerYcbcrConversionInfo>
  {
    using Type = SamplerYcbcrConversionInfo;
  };
  using SamplerYcbcrConversionInfoKHR = SamplerYcbcrConversionInfo;
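  // Illustrative usage sketch (assumptions: a SamplerYcbcrConversion handle has already
  // been created elsewhere; names are example-only; "vk" is the default
  // VULKAN_HPP_NAMESPACE): binding a Y'CbCr conversion to a sampler via pNext.
  //
  //   vk::SamplerYcbcrConversion conversion = /* from device.createSamplerYcbcrConversion(...) */;
  //   vk::SamplerYcbcrConversionInfo conversionInfo( conversion );
  //   vk::SamplerCreateInfo samplerInfo;
  //   samplerInfo.setPNext( &conversionInfo );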
  struct SemaphoreTypeCreateInfo
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSemaphoreTypeCreateInfo;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo(VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary, uint64_t initialValue_ = {}) VULKAN_HPP_NOEXCEPT
      : semaphoreType( semaphoreType_ ), initialValue( initialValue_ )
    {}
    VULKAN_HPP_CONSTEXPR SemaphoreTypeCreateInfo( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    SemaphoreTypeCreateInfo( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
      : SemaphoreTypeCreateInfo( *reinterpret_cast<SemaphoreTypeCreateInfo const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 SemaphoreTypeCreateInfo & operator=( SemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    SemaphoreTypeCreateInfo & operator=( VkSemaphoreTypeCreateInfo const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo const *>( &rhs );
      return *this;
    }
    SemaphoreTypeCreateInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    SemaphoreTypeCreateInfo & setSemaphoreType( VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType_ ) VULKAN_HPP_NOEXCEPT
    {
      semaphoreType = semaphoreType_;
      return *this;
    }
    SemaphoreTypeCreateInfo & setInitialValue( uint64_t initialValue_ ) VULKAN_HPP_NOEXCEPT
    {
      initialValue = initialValue_;
      return *this;
    }
    operator VkSemaphoreTypeCreateInfo const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSemaphoreTypeCreateInfo*>( this );
    }
    operator VkSemaphoreTypeCreateInfo &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSemaphoreTypeCreateInfo*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SemaphoreTypeCreateInfo const& ) const = default;
#else
    bool operator==( SemaphoreTypeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( semaphoreType == rhs.semaphoreType )
          && ( initialValue == rhs.initialValue );
    }
    bool operator!=( SemaphoreTypeCreateInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSemaphoreTypeCreateInfo;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::SemaphoreType semaphoreType = VULKAN_HPP_NAMESPACE::SemaphoreType::eBinary;
    uint64_t initialValue = {};
  };
  static_assert( sizeof( SemaphoreTypeCreateInfo ) == sizeof( VkSemaphoreTypeCreateInfo ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SemaphoreTypeCreateInfo>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::eSemaphoreTypeCreateInfo>
  {
    using Type = SemaphoreTypeCreateInfo;
  };
  using SemaphoreTypeCreateInfoKHR = SemaphoreTypeCreateInfo;
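  // Illustrative usage sketch (the initial value and variable names are example-only
  // assumptions; "vk" is the default VULKAN_HPP_NAMESPACE): creating a timeline semaphore
  // by chaining SemaphoreTypeCreateInfo into SemaphoreCreateInfo.
  //
  //   vk::SemaphoreTypeCreateInfo typeInfo( vk::SemaphoreType::eTimeline, 0 );
  //   vk::SemaphoreCreateInfo semaphoreInfo;
  //   semaphoreInfo.setPNext( &typeInfo );
  //   // vk::UniqueSemaphore timelineSemaphore = device.createSemaphoreUnique( semaphoreInfo );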
  struct SetStateFlagsIndirectCommandNV
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV(uint32_t data_ = {}) VULKAN_HPP_NOEXCEPT
      : data( data_ )
    {}
    VULKAN_HPP_CONSTEXPR SetStateFlagsIndirectCommandNV( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    SetStateFlagsIndirectCommandNV( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
      : SetStateFlagsIndirectCommandNV( *reinterpret_cast<SetStateFlagsIndirectCommandNV const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 SetStateFlagsIndirectCommandNV & operator=( SetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    SetStateFlagsIndirectCommandNV & operator=( VkSetStateFlagsIndirectCommandNV const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV const *>( &rhs );
      return *this;
    }
    SetStateFlagsIndirectCommandNV & setData( uint32_t data_ ) VULKAN_HPP_NOEXCEPT
    {
      data = data_;
      return *this;
    }
    operator VkSetStateFlagsIndirectCommandNV const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSetStateFlagsIndirectCommandNV*>( this );
    }
    operator VkSetStateFlagsIndirectCommandNV &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSetStateFlagsIndirectCommandNV*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SetStateFlagsIndirectCommandNV const& ) const = default;
#else
    bool operator==( SetStateFlagsIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( data == rhs.data );
    }
    bool operator!=( SetStateFlagsIndirectCommandNV const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    uint32_t data = {};
  };
  static_assert( sizeof( SetStateFlagsIndirectCommandNV ) == sizeof( VkSetStateFlagsIndirectCommandNV ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SetStateFlagsIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
  struct ShaderModuleValidationCacheCreateInfoEXT
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT(VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ = {}) VULKAN_HPP_NOEXCEPT
      : validationCache( validationCache_ )
    {}
    VULKAN_HPP_CONSTEXPR ShaderModuleValidationCacheCreateInfoEXT( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    ShaderModuleValidationCacheCreateInfoEXT( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
      : ShaderModuleValidationCacheCreateInfoEXT( *reinterpret_cast<ShaderModuleValidationCacheCreateInfoEXT const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 ShaderModuleValidationCacheCreateInfoEXT & operator=( ShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    ShaderModuleValidationCacheCreateInfoEXT & operator=( VkShaderModuleValidationCacheCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT const *>( &rhs );
      return *this;
    }
    ShaderModuleValidationCacheCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    ShaderModuleValidationCacheCreateInfoEXT & setValidationCache( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache_ ) VULKAN_HPP_NOEXCEPT
    {
      validationCache = validationCache_;
      return *this;
    }
    operator VkShaderModuleValidationCacheCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkShaderModuleValidationCacheCreateInfoEXT*>( this );
    }
    operator VkShaderModuleValidationCacheCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShaderModuleValidationCacheCreateInfoEXT*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ShaderModuleValidationCacheCreateInfoEXT const& ) const = default;
#else
    bool operator==( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( validationCache == rhs.validationCache );
    }
    bool operator!=( ShaderModuleValidationCacheCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eShaderModuleValidationCacheCreateInfoEXT;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache = {};
  };
  static_assert( sizeof( ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ShaderModuleValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::eShaderModuleValidationCacheCreateInfoEXT>
  {
    using Type = ShaderModuleValidationCacheCreateInfoEXT;
  };
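  // Illustrative usage sketch (assumptions: VK_EXT_validation_cache is enabled and a
  // ValidationCacheEXT handle exists; names are example-only; "vk" is the default
  // VULKAN_HPP_NAMESPACE): reusing a validation cache when creating a shader module.
  //
  //   vk::ValidationCacheEXT validationCache = /* from device.createValidationCacheEXT(...) */;
  //   vk::ShaderModuleValidationCacheCreateInfoEXT cacheInfo( validationCache );
  //   vk::ShaderModuleCreateInfo shaderModuleInfo;
  //   shaderModuleInfo.setPNext( &cacheInfo );
  //   // ... set the SPIR-V code, then create the shader module as usual.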
  struct ShaderResourceUsageAMD
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD(uint32_t numUsedVgprs_ = {}, uint32_t numUsedSgprs_ = {}, uint32_t ldsSizePerLocalWorkGroup_ = {}, size_t ldsUsageSizeInBytes_ = {}, size_t scratchMemUsageInBytes_ = {}) VULKAN_HPP_NOEXCEPT
      : numUsedVgprs( numUsedVgprs_ ), numUsedSgprs( numUsedSgprs_ ), ldsSizePerLocalWorkGroup( ldsSizePerLocalWorkGroup_ ), ldsUsageSizeInBytes( ldsUsageSizeInBytes_ ), scratchMemUsageInBytes( scratchMemUsageInBytes_ )
    {}
    VULKAN_HPP_CONSTEXPR ShaderResourceUsageAMD( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    ShaderResourceUsageAMD( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : ShaderResourceUsageAMD( *reinterpret_cast<ShaderResourceUsageAMD const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 ShaderResourceUsageAMD & operator=( ShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    ShaderResourceUsageAMD & operator=( VkShaderResourceUsageAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD const *>( &rhs );
      return *this;
    }
    operator VkShaderResourceUsageAMD const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkShaderResourceUsageAMD*>( this );
    }
    operator VkShaderResourceUsageAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShaderResourceUsageAMD*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ShaderResourceUsageAMD const& ) const = default;
#else
    bool operator==( ShaderResourceUsageAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( numUsedVgprs == rhs.numUsedVgprs )
          && ( numUsedSgprs == rhs.numUsedSgprs )
          && ( ldsSizePerLocalWorkGroup == rhs.ldsSizePerLocalWorkGroup )
          && ( ldsUsageSizeInBytes == rhs.ldsUsageSizeInBytes )
          && ( scratchMemUsageInBytes == rhs.scratchMemUsageInBytes );
    }
    bool operator!=( ShaderResourceUsageAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    uint32_t numUsedVgprs = {};
    uint32_t numUsedSgprs = {};
    uint32_t ldsSizePerLocalWorkGroup = {};
    size_t ldsUsageSizeInBytes = {};
    size_t scratchMemUsageInBytes = {};
  };
  static_assert( sizeof( ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ShaderResourceUsageAMD>::value, "struct wrapper is not a standard layout!" );
  struct ShaderStatisticsInfoAMD
  {
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD(VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask_ = {}, VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage_ = {}, uint32_t numPhysicalVgprs_ = {}, uint32_t numPhysicalSgprs_ = {}, uint32_t numAvailableVgprs_ = {}, uint32_t numAvailableSgprs_ = {}, std::array<uint32_t,3> const& computeWorkGroupSize_ = {}) VULKAN_HPP_NOEXCEPT
      : shaderStageMask( shaderStageMask_ ), resourceUsage( resourceUsage_ ), numPhysicalVgprs( numPhysicalVgprs_ ), numPhysicalSgprs( numPhysicalSgprs_ ), numAvailableVgprs( numAvailableVgprs_ ), numAvailableSgprs( numAvailableSgprs_ ), computeWorkGroupSize( computeWorkGroupSize_ )
    {}
    VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    ShaderStatisticsInfoAMD( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
      : ShaderStatisticsInfoAMD( *reinterpret_cast<ShaderStatisticsInfoAMD const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 ShaderStatisticsInfoAMD & operator=( ShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    ShaderStatisticsInfoAMD & operator=( VkShaderStatisticsInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD const *>( &rhs );
      return *this;
    }
    operator VkShaderStatisticsInfoAMD const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkShaderStatisticsInfoAMD*>( this );
    }
    operator VkShaderStatisticsInfoAMD &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkShaderStatisticsInfoAMD*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( ShaderStatisticsInfoAMD const& ) const = default;
#else
    bool operator==( ShaderStatisticsInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( shaderStageMask == rhs.shaderStageMask )
          && ( resourceUsage == rhs.resourceUsage )
          && ( numPhysicalVgprs == rhs.numPhysicalVgprs )
          && ( numPhysicalSgprs == rhs.numPhysicalSgprs )
          && ( numAvailableVgprs == rhs.numAvailableVgprs )
          && ( numAvailableSgprs == rhs.numAvailableSgprs )
          && ( computeWorkGroupSize == rhs.computeWorkGroupSize );
    }
    bool operator!=( ShaderStatisticsInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::ShaderStageFlags shaderStageMask = {};
    VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD resourceUsage = {};
    uint32_t numPhysicalVgprs = {};
    uint32_t numPhysicalSgprs = {};
    uint32_t numAvailableVgprs = {};
    uint32_t numAvailableSgprs = {};
    VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, 3> computeWorkGroupSize = {};
  };
  static_assert( sizeof( ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<ShaderStatisticsInfoAMD>::value, "struct wrapper is not a standard layout!" );
  struct SharedPresentSurfaceCapabilitiesKHR
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR(VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags_ = {}) VULKAN_HPP_NOEXCEPT
      : sharedPresentSupportedUsageFlags( sharedPresentSupportedUsageFlags_ )
    {}
    VULKAN_HPP_CONSTEXPR SharedPresentSurfaceCapabilitiesKHR( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    SharedPresentSurfaceCapabilitiesKHR( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
      : SharedPresentSurfaceCapabilitiesKHR( *reinterpret_cast<SharedPresentSurfaceCapabilitiesKHR const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 SharedPresentSurfaceCapabilitiesKHR & operator=( SharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    SharedPresentSurfaceCapabilitiesKHR & operator=( VkSharedPresentSurfaceCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR const *>( &rhs );
      return *this;
    }
    operator VkSharedPresentSurfaceCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkSharedPresentSurfaceCapabilitiesKHR*>( this );
    }
    operator VkSharedPresentSurfaceCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkSharedPresentSurfaceCapabilitiesKHR*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( SharedPresentSurfaceCapabilitiesKHR const& ) const = default;
#else
    bool operator==( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( sharedPresentSupportedUsageFlags == rhs.sharedPresentSupportedUsageFlags );
    }
    bool operator!=( SharedPresentSurfaceCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSharedPresentSurfaceCapabilitiesKHR;
    void* pNext = {};
    VULKAN_HPP_NAMESPACE::ImageUsageFlags sharedPresentSupportedUsageFlags = {};
  };
  static_assert( sizeof( SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<SharedPresentSurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::eSharedPresentSurfaceCapabilitiesKHR>
  {
    using Type = SharedPresentSurfaceCapabilitiesKHR;
  };
#ifdef VK_USE_PLATFORM_GGP
  struct StreamDescriptorSurfaceCreateInfoGGP
  {
    static const bool allowDuplicate = false;
    static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
#if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP(VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ = {}, GgpStreamDescriptor streamDescriptor_ = {}) VULKAN_HPP_NOEXCEPT
      : flags( flags_ ), streamDescriptor( streamDescriptor_ )
    {}
    VULKAN_HPP_CONSTEXPR StreamDescriptorSurfaceCreateInfoGGP( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    StreamDescriptorSurfaceCreateInfoGGP( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
      : StreamDescriptorSurfaceCreateInfoGGP( *reinterpret_cast<StreamDescriptorSurfaceCreateInfoGGP const *>( &rhs ) )
    {}
#endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
    VULKAN_HPP_CONSTEXPR_14 StreamDescriptorSurfaceCreateInfoGGP & operator=( StreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT = default;
    StreamDescriptorSurfaceCreateInfoGGP & operator=( VkStreamDescriptorSurfaceCreateInfoGGP const & rhs ) VULKAN_HPP_NOEXCEPT
    {
      *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP const *>( &rhs );
      return *this;
    }
    StreamDescriptorSurfaceCreateInfoGGP & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
    {
      pNext = pNext_;
      return *this;
    }
    StreamDescriptorSurfaceCreateInfoGGP & setFlags( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags_ ) VULKAN_HPP_NOEXCEPT
    {
      flags = flags_;
      return *this;
    }
    StreamDescriptorSurfaceCreateInfoGGP & setStreamDescriptor( GgpStreamDescriptor streamDescriptor_ ) VULKAN_HPP_NOEXCEPT
    {
      streamDescriptor = streamDescriptor_;
      return *this;
    }
    operator VkStreamDescriptorSurfaceCreateInfoGGP const&() const VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP*>( this );
    }
    operator VkStreamDescriptorSurfaceCreateInfoGGP &() VULKAN_HPP_NOEXCEPT
    {
      return *reinterpret_cast<VkStreamDescriptorSurfaceCreateInfoGGP*>( this );
    }
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
    auto operator<=>( StreamDescriptorSurfaceCreateInfoGGP const& ) const = default;
#else
    bool operator==( StreamDescriptorSurfaceCreateInfoGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return ( sType == rhs.sType )
          && ( pNext == rhs.pNext )
          && ( flags == rhs.flags )
          && ( memcmp( &streamDescriptor, &rhs.streamDescriptor, sizeof( GgpStreamDescriptor ) ) == 0 );
    }
    bool operator!=( StreamDescriptorSurfaceCreateInfoGGP const& rhs ) const VULKAN_HPP_NOEXCEPT
    {
      return !operator==( rhs );
    }
#endif
  public:
    VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eStreamDescriptorSurfaceCreateInfoGGP;
    const void* pNext = {};
    VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP flags = {};
    GgpStreamDescriptor streamDescriptor = {};
  };
  static_assert( sizeof( StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ), "struct and wrapper have different size!" );
  static_assert( std::is_standard_layout<StreamDescriptorSurfaceCreateInfoGGP>::value, "struct wrapper is not a standard layout!" );
  template <>
  struct CppType<StructureType, StructureType::eStreamDescriptorSurfaceCreateInfoGGP>
  {
    using Type = StreamDescriptorSurfaceCreateInfoGGP;
  };
#endif /*VK_USE_PLATFORM_GGP*/
  61534. struct SubpassDescriptionDepthStencilResolve
  61535. {
  61536. static const bool allowDuplicate = false;
  61537. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSubpassDescriptionDepthStencilResolve;
  61538. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  61539. VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve(VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone, const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment_ = {}) VULKAN_HPP_NOEXCEPT
  61540. : depthResolveMode( depthResolveMode_ ), stencilResolveMode( stencilResolveMode_ ), pDepthStencilResolveAttachment( pDepthStencilResolveAttachment_ )
  61541. {}
  61542. VULKAN_HPP_CONSTEXPR SubpassDescriptionDepthStencilResolve( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  61543. SubpassDescriptionDepthStencilResolve( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
  61544. : SubpassDescriptionDepthStencilResolve( *reinterpret_cast<SubpassDescriptionDepthStencilResolve const *>( &rhs ) )
  61545. {}
  61546. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  61547. VULKAN_HPP_CONSTEXPR_14 SubpassDescriptionDepthStencilResolve & operator=( SubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  61548. SubpassDescriptionDepthStencilResolve & operator=( VkSubpassDescriptionDepthStencilResolve const & rhs ) VULKAN_HPP_NOEXCEPT
  61549. {
  61550. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve const *>( &rhs );
  61551. return *this;
  61552. }
  61553. SubpassDescriptionDepthStencilResolve & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  61554. {
  61555. pNext = pNext_;
  61556. return *this;
  61557. }
  61558. SubpassDescriptionDepthStencilResolve & setDepthResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode_ ) VULKAN_HPP_NOEXCEPT
  61559. {
  61560. depthResolveMode = depthResolveMode_;
  61561. return *this;
  61562. }
  61563. SubpassDescriptionDepthStencilResolve & setStencilResolveMode( VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode_ ) VULKAN_HPP_NOEXCEPT
  61564. {
  61565. stencilResolveMode = stencilResolveMode_;
  61566. return *this;
  61567. }
  61568. SubpassDescriptionDepthStencilResolve & setPDepthStencilResolveAttachment( const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment_ ) VULKAN_HPP_NOEXCEPT
  61569. {
  61570. pDepthStencilResolveAttachment = pDepthStencilResolveAttachment_;
  61571. return *this;
  61572. }
  61573. operator VkSubpassDescriptionDepthStencilResolve const&() const VULKAN_HPP_NOEXCEPT
  61574. {
  61575. return *reinterpret_cast<const VkSubpassDescriptionDepthStencilResolve*>( this );
  61576. }
  61577. operator VkSubpassDescriptionDepthStencilResolve &() VULKAN_HPP_NOEXCEPT
  61578. {
  61579. return *reinterpret_cast<VkSubpassDescriptionDepthStencilResolve*>( this );
  61580. }
  61581. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  61582. auto operator<=>( SubpassDescriptionDepthStencilResolve const& ) const = default;
  61583. #else
  61584. bool operator==( SubpassDescriptionDepthStencilResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
  61585. {
  61586. return ( sType == rhs.sType )
  61587. && ( pNext == rhs.pNext )
  61588. && ( depthResolveMode == rhs.depthResolveMode )
  61589. && ( stencilResolveMode == rhs.stencilResolveMode )
  61590. && ( pDepthStencilResolveAttachment == rhs.pDepthStencilResolveAttachment );
  61591. }
  61592. bool operator!=( SubpassDescriptionDepthStencilResolve const& rhs ) const VULKAN_HPP_NOEXCEPT
  61593. {
  61594. return !operator==( rhs );
  61595. }
  61596. #endif
  61597. public:
  61598. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSubpassDescriptionDepthStencilResolve;
  61599. const void* pNext = {};
  61600. VULKAN_HPP_NAMESPACE::ResolveModeFlagBits depthResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
  61601. VULKAN_HPP_NAMESPACE::ResolveModeFlagBits stencilResolveMode = VULKAN_HPP_NAMESPACE::ResolveModeFlagBits::eNone;
  61602. const VULKAN_HPP_NAMESPACE::AttachmentReference2* pDepthStencilResolveAttachment = {};
  61603. };
  61604. static_assert( sizeof( SubpassDescriptionDepthStencilResolve ) == sizeof( VkSubpassDescriptionDepthStencilResolve ), "struct and wrapper have different size!" );
  61605. static_assert( std::is_standard_layout<SubpassDescriptionDepthStencilResolve>::value, "struct wrapper is not a standard layout!" );
  61606. template <>
  61607. struct CppType<StructureType, StructureType::eSubpassDescriptionDepthStencilResolve>
  61608. {
  61609. using Type = SubpassDescriptionDepthStencilResolve;
  61610. };
  61611. using SubpassDescriptionDepthStencilResolveKHR = SubpassDescriptionDepthStencilResolve;
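// Illustrative usage sketch (editorial addition, not generated code): a depth/stencil resolve
// description is chained onto a SubpassDescription2 via pNext when building a render pass with
// createRenderPass2. The attachment index, layout and resolve modes below are hypothetical.
//
//   vk::AttachmentReference2 dsResolveRef( 2, vk::ImageLayout::eDepthStencilAttachmentOptimal );
//   vk::SubpassDescriptionDepthStencilResolve dsResolve;
//   dsResolve.setDepthResolveMode( vk::ResolveModeFlagBits::eSampleZero )
//            .setStencilResolveMode( vk::ResolveModeFlagBits::eSampleZero )
//            .setPDepthStencilResolveAttachment( &dsResolveRef );
//   vk::SubpassDescription2 subpass;            // attachments filled in elsewhere
//   subpass.setPNext( &dsResolve );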
  61612. #ifdef VK_USE_PLATFORM_WIN32_KHR
  61613. struct SurfaceCapabilitiesFullScreenExclusiveEXT
  61614. {
  61615. static const bool allowDuplicate = false;
  61616. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
  61617. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  61618. VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT(VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ = {}) VULKAN_HPP_NOEXCEPT
  61619. : fullScreenExclusiveSupported( fullScreenExclusiveSupported_ )
  61620. {}
  61621. VULKAN_HPP_CONSTEXPR SurfaceCapabilitiesFullScreenExclusiveEXT( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  61622. SurfaceCapabilitiesFullScreenExclusiveEXT( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  61623. : SurfaceCapabilitiesFullScreenExclusiveEXT( *reinterpret_cast<SurfaceCapabilitiesFullScreenExclusiveEXT const *>( &rhs ) )
  61624. {}
  61625. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  61626. VULKAN_HPP_CONSTEXPR_14 SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( SurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  61627. SurfaceCapabilitiesFullScreenExclusiveEXT & operator=( VkSurfaceCapabilitiesFullScreenExclusiveEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  61628. {
  61629. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT const *>( &rhs );
  61630. return *this;
  61631. }
  61632. SurfaceCapabilitiesFullScreenExclusiveEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  61633. {
  61634. pNext = pNext_;
  61635. return *this;
  61636. }
  61637. SurfaceCapabilitiesFullScreenExclusiveEXT & setFullScreenExclusiveSupported( VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported_ ) VULKAN_HPP_NOEXCEPT
  61638. {
  61639. fullScreenExclusiveSupported = fullScreenExclusiveSupported_;
  61640. return *this;
  61641. }
  61642. operator VkSurfaceCapabilitiesFullScreenExclusiveEXT const&() const VULKAN_HPP_NOEXCEPT
  61643. {
  61644. return *reinterpret_cast<const VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
  61645. }
  61646. operator VkSurfaceCapabilitiesFullScreenExclusiveEXT &() VULKAN_HPP_NOEXCEPT
  61647. {
  61648. return *reinterpret_cast<VkSurfaceCapabilitiesFullScreenExclusiveEXT*>( this );
  61649. }
  61650. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  61651. auto operator<=>( SurfaceCapabilitiesFullScreenExclusiveEXT const& ) const = default;
  61652. #else
  61653. bool operator==( SurfaceCapabilitiesFullScreenExclusiveEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  61654. {
  61655. return ( sType == rhs.sType )
  61656. && ( pNext == rhs.pNext )
  61657. && ( fullScreenExclusiveSupported == rhs.fullScreenExclusiveSupported );
  61658. }
  61659. bool operator!=( SurfaceCapabilitiesFullScreenExclusiveEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  61660. {
  61661. return !operator==( rhs );
  61662. }
  61663. #endif
  61664. public:
  61665. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT;
  61666. void* pNext = {};
  61667. VULKAN_HPP_NAMESPACE::Bool32 fullScreenExclusiveSupported = {};
  61668. };
  61669. static_assert( sizeof( SurfaceCapabilitiesFullScreenExclusiveEXT ) == sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ), "struct and wrapper have different size!" );
  61670. static_assert( std::is_standard_layout<SurfaceCapabilitiesFullScreenExclusiveEXT>::value, "struct wrapper is not a standard layout!" );
  61671. template <>
  61672. struct CppType<StructureType, StructureType::eSurfaceCapabilitiesFullScreenExclusiveEXT>
  61673. {
  61674. using Type = SurfaceCapabilitiesFullScreenExclusiveEXT;
  61675. };
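// Illustrative usage sketch (editorial addition): this struct is read back rather than filled in,
// by chaining it onto SurfaceCapabilities2KHR in a structure-chain query. physicalDevice and
// surfaceInfo are assumed to exist, and the chain overload of getSurfaceCapabilities2KHR is assumed.
//
//   auto chain = physicalDevice.getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR,
//                                                          vk::SurfaceCapabilitiesFullScreenExclusiveEXT>( surfaceInfo );
//   vk::Bool32 supported = chain.get<vk::SurfaceCapabilitiesFullScreenExclusiveEXT>().fullScreenExclusiveSupported;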
  61676. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  61677. #ifdef VK_USE_PLATFORM_WIN32_KHR
  61678. struct SurfaceFullScreenExclusiveInfoEXT
  61679. {
  61680. static const bool allowDuplicate = false;
  61681. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
  61682. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  61683. VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT(VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault) VULKAN_HPP_NOEXCEPT
  61684. : fullScreenExclusive( fullScreenExclusive_ )
  61685. {}
  61686. VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveInfoEXT( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  61687. SurfaceFullScreenExclusiveInfoEXT( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  61688. : SurfaceFullScreenExclusiveInfoEXT( *reinterpret_cast<SurfaceFullScreenExclusiveInfoEXT const *>( &rhs ) )
  61689. {}
  61690. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  61691. VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveInfoEXT & operator=( SurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  61692. SurfaceFullScreenExclusiveInfoEXT & operator=( VkSurfaceFullScreenExclusiveInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  61693. {
  61694. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT const *>( &rhs );
  61695. return *this;
  61696. }
  61697. SurfaceFullScreenExclusiveInfoEXT & setPNext( void* pNext_ ) VULKAN_HPP_NOEXCEPT
  61698. {
  61699. pNext = pNext_;
  61700. return *this;
  61701. }
  61702. SurfaceFullScreenExclusiveInfoEXT & setFullScreenExclusive( VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive_ ) VULKAN_HPP_NOEXCEPT
  61703. {
  61704. fullScreenExclusive = fullScreenExclusive_;
  61705. return *this;
  61706. }
  61707. operator VkSurfaceFullScreenExclusiveInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  61708. {
  61709. return *reinterpret_cast<const VkSurfaceFullScreenExclusiveInfoEXT*>( this );
  61710. }
  61711. operator VkSurfaceFullScreenExclusiveInfoEXT &() VULKAN_HPP_NOEXCEPT
  61712. {
  61713. return *reinterpret_cast<VkSurfaceFullScreenExclusiveInfoEXT*>( this );
  61714. }
  61715. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  61716. auto operator<=>( SurfaceFullScreenExclusiveInfoEXT const& ) const = default;
  61717. #else
  61718. bool operator==( SurfaceFullScreenExclusiveInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  61719. {
  61720. return ( sType == rhs.sType )
  61721. && ( pNext == rhs.pNext )
  61722. && ( fullScreenExclusive == rhs.fullScreenExclusive );
  61723. }
  61724. bool operator!=( SurfaceFullScreenExclusiveInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  61725. {
  61726. return !operator==( rhs );
  61727. }
  61728. #endif
  61729. public:
  61730. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveInfoEXT;
  61731. void* pNext = {};
  61732. VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT fullScreenExclusive = VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT::eDefault;
  61733. };
  61734. static_assert( sizeof( SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ), "struct and wrapper have different size!" );
  61735. static_assert( std::is_standard_layout<SurfaceFullScreenExclusiveInfoEXT>::value, "struct wrapper is not a standard layout!" );
  61736. template <>
  61737. struct CppType<StructureType, StructureType::eSurfaceFullScreenExclusiveInfoEXT>
  61738. {
  61739. using Type = SurfaceFullScreenExclusiveInfoEXT;
  61740. };
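// Illustrative usage sketch (editorial addition): full-screen exclusive behaviour is requested by
// chaining this struct onto SwapchainCreateInfoKHR before the swapchain is created;
// swapchainCreateInfo is assumed to be set up elsewhere.
//
//   vk::SurfaceFullScreenExclusiveInfoEXT fseInfo( vk::FullScreenExclusiveEXT::eApplicationControlled );
//   swapchainCreateInfo.setPNext( &fseInfo );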
  61741. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  61742. #ifdef VK_USE_PLATFORM_WIN32_KHR
  61743. struct SurfaceFullScreenExclusiveWin32InfoEXT
  61744. {
  61745. static const bool allowDuplicate = false;
  61746. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
  61747. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  61748. VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT(HMONITOR hmonitor_ = {}) VULKAN_HPP_NOEXCEPT
  61749. : hmonitor( hmonitor_ )
  61750. {}
  61751. VULKAN_HPP_CONSTEXPR SurfaceFullScreenExclusiveWin32InfoEXT( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  61752. SurfaceFullScreenExclusiveWin32InfoEXT( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  61753. : SurfaceFullScreenExclusiveWin32InfoEXT( *reinterpret_cast<SurfaceFullScreenExclusiveWin32InfoEXT const *>( &rhs ) )
  61754. {}
  61755. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  61756. VULKAN_HPP_CONSTEXPR_14 SurfaceFullScreenExclusiveWin32InfoEXT & operator=( SurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  61757. SurfaceFullScreenExclusiveWin32InfoEXT & operator=( VkSurfaceFullScreenExclusiveWin32InfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  61758. {
  61759. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT const *>( &rhs );
  61760. return *this;
  61761. }
  61762. SurfaceFullScreenExclusiveWin32InfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  61763. {
  61764. pNext = pNext_;
  61765. return *this;
  61766. }
  61767. SurfaceFullScreenExclusiveWin32InfoEXT & setHmonitor( HMONITOR hmonitor_ ) VULKAN_HPP_NOEXCEPT
  61768. {
  61769. hmonitor = hmonitor_;
  61770. return *this;
  61771. }
  61772. operator VkSurfaceFullScreenExclusiveWin32InfoEXT const&() const VULKAN_HPP_NOEXCEPT
  61773. {
  61774. return *reinterpret_cast<const VkSurfaceFullScreenExclusiveWin32InfoEXT*>( this );
  61775. }
  61776. operator VkSurfaceFullScreenExclusiveWin32InfoEXT &() VULKAN_HPP_NOEXCEPT
  61777. {
  61778. return *reinterpret_cast<VkSurfaceFullScreenExclusiveWin32InfoEXT*>( this );
  61779. }
  61780. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  61781. auto operator<=>( SurfaceFullScreenExclusiveWin32InfoEXT const& ) const = default;
  61782. #else
  61783. bool operator==( SurfaceFullScreenExclusiveWin32InfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  61784. {
  61785. return ( sType == rhs.sType )
  61786. && ( pNext == rhs.pNext )
  61787. && ( hmonitor == rhs.hmonitor );
  61788. }
  61789. bool operator!=( SurfaceFullScreenExclusiveWin32InfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  61790. {
  61791. return !operator==( rhs );
  61792. }
  61793. #endif
  61794. public:
  61795. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT;
  61796. const void* pNext = {};
  61797. HMONITOR hmonitor = {};
  61798. };
  61799. static_assert( sizeof( SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ), "struct and wrapper have different size!" );
  61800. static_assert( std::is_standard_layout<SurfaceFullScreenExclusiveWin32InfoEXT>::value, "struct wrapper is not a standard layout!" );
  61801. template <>
  61802. struct CppType<StructureType, StructureType::eSurfaceFullScreenExclusiveWin32InfoEXT>
  61803. {
  61804. using Type = SurfaceFullScreenExclusiveWin32InfoEXT;
  61805. };
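// Illustrative usage sketch (editorial addition): with application-controlled full-screen exclusive
// mode, the target monitor is supplied by appending this struct to the same pNext chain. hwnd and
// fseInfo (a SurfaceFullScreenExclusiveInfoEXT) are assumed to exist.
//
//   HMONITOR monitor = MonitorFromWindow( hwnd, MONITOR_DEFAULTTOPRIMARY );
//   vk::SurfaceFullScreenExclusiveWin32InfoEXT fseWin32Info( monitor );
//   fseInfo.setPNext( &fseWin32Info );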
  61806. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  61807. struct SurfaceProtectedCapabilitiesKHR
  61808. {
  61809. static const bool allowDuplicate = false;
  61810. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSurfaceProtectedCapabilitiesKHR;
  61811. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  61812. VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR(VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ = {}) VULKAN_HPP_NOEXCEPT
  61813. : supportsProtected( supportsProtected_ )
  61814. {}
  61815. VULKAN_HPP_CONSTEXPR SurfaceProtectedCapabilitiesKHR( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  61816. SurfaceProtectedCapabilitiesKHR( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  61817. : SurfaceProtectedCapabilitiesKHR( *reinterpret_cast<SurfaceProtectedCapabilitiesKHR const *>( &rhs ) )
  61818. {}
  61819. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  61820. VULKAN_HPP_CONSTEXPR_14 SurfaceProtectedCapabilitiesKHR & operator=( SurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  61821. SurfaceProtectedCapabilitiesKHR & operator=( VkSurfaceProtectedCapabilitiesKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  61822. {
  61823. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR const *>( &rhs );
  61824. return *this;
  61825. }
  61826. SurfaceProtectedCapabilitiesKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  61827. {
  61828. pNext = pNext_;
  61829. return *this;
  61830. }
  61831. SurfaceProtectedCapabilitiesKHR & setSupportsProtected( VULKAN_HPP_NAMESPACE::Bool32 supportsProtected_ ) VULKAN_HPP_NOEXCEPT
  61832. {
  61833. supportsProtected = supportsProtected_;
  61834. return *this;
  61835. }
  61836. operator VkSurfaceProtectedCapabilitiesKHR const&() const VULKAN_HPP_NOEXCEPT
  61837. {
  61838. return *reinterpret_cast<const VkSurfaceProtectedCapabilitiesKHR*>( this );
  61839. }
  61840. operator VkSurfaceProtectedCapabilitiesKHR &() VULKAN_HPP_NOEXCEPT
  61841. {
  61842. return *reinterpret_cast<VkSurfaceProtectedCapabilitiesKHR*>( this );
  61843. }
  61844. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  61845. auto operator<=>( SurfaceProtectedCapabilitiesKHR const& ) const = default;
  61846. #else
  61847. bool operator==( SurfaceProtectedCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  61848. {
  61849. return ( sType == rhs.sType )
  61850. && ( pNext == rhs.pNext )
  61851. && ( supportsProtected == rhs.supportsProtected );
  61852. }
  61853. bool operator!=( SurfaceProtectedCapabilitiesKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  61854. {
  61855. return !operator==( rhs );
  61856. }
  61857. #endif
  61858. public:
  61859. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSurfaceProtectedCapabilitiesKHR;
  61860. const void* pNext = {};
  61861. VULKAN_HPP_NAMESPACE::Bool32 supportsProtected = {};
  61862. };
  61863. static_assert( sizeof( SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ), "struct and wrapper have different size!" );
  61864. static_assert( std::is_standard_layout<SurfaceProtectedCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
  61865. template <>
  61866. struct CppType<StructureType, StructureType::eSurfaceProtectedCapabilitiesKHR>
  61867. {
  61868. using Type = SurfaceProtectedCapabilitiesKHR;
  61869. };
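// Illustrative usage sketch (editorial addition): protected swapchain support is queried by chaining
// this struct onto SurfaceCapabilities2KHR; physicalDevice and surfaceInfo are assumed, as is the
// structure-chain overload of getSurfaceCapabilities2KHR.
//
//   auto caps = physicalDevice.getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR,
//                                                         vk::SurfaceProtectedCapabilitiesKHR>( surfaceInfo );
//   vk::Bool32 supportsProtected = caps.get<vk::SurfaceProtectedCapabilitiesKHR>().supportsProtected;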
  61870. struct SwapchainCounterCreateInfoEXT
  61871. {
  61872. static const bool allowDuplicate = false;
  61873. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainCounterCreateInfoEXT;
  61874. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  61875. VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT(VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ = {}) VULKAN_HPP_NOEXCEPT
  61876. : surfaceCounters( surfaceCounters_ )
  61877. {}
  61878. VULKAN_HPP_CONSTEXPR SwapchainCounterCreateInfoEXT( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  61879. SwapchainCounterCreateInfoEXT( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  61880. : SwapchainCounterCreateInfoEXT( *reinterpret_cast<SwapchainCounterCreateInfoEXT const *>( &rhs ) )
  61881. {}
  61882. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  61883. VULKAN_HPP_CONSTEXPR_14 SwapchainCounterCreateInfoEXT & operator=( SwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  61884. SwapchainCounterCreateInfoEXT & operator=( VkSwapchainCounterCreateInfoEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  61885. {
  61886. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT const *>( &rhs );
  61887. return *this;
  61888. }
  61889. SwapchainCounterCreateInfoEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  61890. {
  61891. pNext = pNext_;
  61892. return *this;
  61893. }
  61894. SwapchainCounterCreateInfoEXT & setSurfaceCounters( VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters_ ) VULKAN_HPP_NOEXCEPT
  61895. {
  61896. surfaceCounters = surfaceCounters_;
  61897. return *this;
  61898. }
  61899. operator VkSwapchainCounterCreateInfoEXT const&() const VULKAN_HPP_NOEXCEPT
  61900. {
  61901. return *reinterpret_cast<const VkSwapchainCounterCreateInfoEXT*>( this );
  61902. }
  61903. operator VkSwapchainCounterCreateInfoEXT &() VULKAN_HPP_NOEXCEPT
  61904. {
  61905. return *reinterpret_cast<VkSwapchainCounterCreateInfoEXT*>( this );
  61906. }
  61907. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  61908. auto operator<=>( SwapchainCounterCreateInfoEXT const& ) const = default;
  61909. #else
  61910. bool operator==( SwapchainCounterCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  61911. {
  61912. return ( sType == rhs.sType )
  61913. && ( pNext == rhs.pNext )
  61914. && ( surfaceCounters == rhs.surfaceCounters );
  61915. }
  61916. bool operator!=( SwapchainCounterCreateInfoEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  61917. {
  61918. return !operator==( rhs );
  61919. }
  61920. #endif
  61921. public:
  61922. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainCounterCreateInfoEXT;
  61923. const void* pNext = {};
  61924. VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT surfaceCounters = {};
  61925. };
  61926. static_assert( sizeof( SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ), "struct and wrapper have different size!" );
  61927. static_assert( std::is_standard_layout<SwapchainCounterCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
  61928. template <>
  61929. struct CppType<StructureType, StructureType::eSwapchainCounterCreateInfoEXT>
  61930. {
  61931. using Type = SwapchainCounterCreateInfoEXT;
  61932. };
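// Illustrative usage sketch (editorial addition): surface counters from VK_EXT_display_control are
// enabled by chaining this struct onto SwapchainCreateInfoKHR; swapchainCreateInfo is assumed.
//
//   vk::SwapchainCounterCreateInfoEXT counterInfo( vk::SurfaceCounterFlagBitsEXT::eVblank );
//   swapchainCreateInfo.setPNext( &counterInfo );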
  61933. struct SwapchainDisplayNativeHdrCreateInfoAMD
  61934. {
  61935. static const bool allowDuplicate = false;
  61936. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
  61937. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  61938. VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD(VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ = {}) VULKAN_HPP_NOEXCEPT
  61939. : localDimmingEnable( localDimmingEnable_ )
  61940. {}
  61941. VULKAN_HPP_CONSTEXPR SwapchainDisplayNativeHdrCreateInfoAMD( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  61942. SwapchainDisplayNativeHdrCreateInfoAMD( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
  61943. : SwapchainDisplayNativeHdrCreateInfoAMD( *reinterpret_cast<SwapchainDisplayNativeHdrCreateInfoAMD const *>( &rhs ) )
  61944. {}
  61945. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  61946. VULKAN_HPP_CONSTEXPR_14 SwapchainDisplayNativeHdrCreateInfoAMD & operator=( SwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  61947. SwapchainDisplayNativeHdrCreateInfoAMD & operator=( VkSwapchainDisplayNativeHdrCreateInfoAMD const & rhs ) VULKAN_HPP_NOEXCEPT
  61948. {
  61949. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD const *>( &rhs );
  61950. return *this;
  61951. }
  61952. SwapchainDisplayNativeHdrCreateInfoAMD & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  61953. {
  61954. pNext = pNext_;
  61955. return *this;
  61956. }
  61957. SwapchainDisplayNativeHdrCreateInfoAMD & setLocalDimmingEnable( VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable_ ) VULKAN_HPP_NOEXCEPT
  61958. {
  61959. localDimmingEnable = localDimmingEnable_;
  61960. return *this;
  61961. }
  61962. operator VkSwapchainDisplayNativeHdrCreateInfoAMD const&() const VULKAN_HPP_NOEXCEPT
  61963. {
  61964. return *reinterpret_cast<const VkSwapchainDisplayNativeHdrCreateInfoAMD*>( this );
  61965. }
  61966. operator VkSwapchainDisplayNativeHdrCreateInfoAMD &() VULKAN_HPP_NOEXCEPT
  61967. {
  61968. return *reinterpret_cast<VkSwapchainDisplayNativeHdrCreateInfoAMD*>( this );
  61969. }
  61970. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  61971. auto operator<=>( SwapchainDisplayNativeHdrCreateInfoAMD const& ) const = default;
  61972. #else
  61973. bool operator==( SwapchainDisplayNativeHdrCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
  61974. {
  61975. return ( sType == rhs.sType )
  61976. && ( pNext == rhs.pNext )
  61977. && ( localDimmingEnable == rhs.localDimmingEnable );
  61978. }
  61979. bool operator!=( SwapchainDisplayNativeHdrCreateInfoAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
  61980. {
  61981. return !operator==( rhs );
  61982. }
  61983. #endif
  61984. public:
  61985. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD;
  61986. const void* pNext = {};
  61987. VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable = {};
  61988. };
  61989. static_assert( sizeof( SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ), "struct and wrapper have different size!" );
  61990. static_assert( std::is_standard_layout<SwapchainDisplayNativeHdrCreateInfoAMD>::value, "struct wrapper is not a standard layout!" );
  61991. template <>
  61992. struct CppType<StructureType, StructureType::eSwapchainDisplayNativeHdrCreateInfoAMD>
  61993. {
  61994. using Type = SwapchainDisplayNativeHdrCreateInfoAMD;
  61995. };
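// Illustrative usage sketch (editorial addition): local dimming can be disabled at swapchain creation
// by chaining this struct onto SwapchainCreateInfoKHR; swapchainCreateInfo is assumed.
//
//   vk::SwapchainDisplayNativeHdrCreateInfoAMD hdrInfo( VK_FALSE );   // disable local dimming
//   swapchainCreateInfo.setPNext( &hdrInfo );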
  61996. struct TextureLODGatherFormatPropertiesAMD
  61997. {
  61998. static const bool allowDuplicate = false;
  61999. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTextureLodGatherFormatPropertiesAMD;
  62000. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62001. VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD(VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD_ = {}) VULKAN_HPP_NOEXCEPT
  62002. : supportsTextureGatherLODBiasAMD( supportsTextureGatherLODBiasAMD_ )
  62003. {}
  62004. VULKAN_HPP_CONSTEXPR TextureLODGatherFormatPropertiesAMD( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62005. TextureLODGatherFormatPropertiesAMD( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
  62006. : TextureLODGatherFormatPropertiesAMD( *reinterpret_cast<TextureLODGatherFormatPropertiesAMD const *>( &rhs ) )
  62007. {}
  62008. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62009. VULKAN_HPP_CONSTEXPR_14 TextureLODGatherFormatPropertiesAMD & operator=( TextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62010. TextureLODGatherFormatPropertiesAMD & operator=( VkTextureLODGatherFormatPropertiesAMD const & rhs ) VULKAN_HPP_NOEXCEPT
  62011. {
  62012. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD const *>( &rhs );
  62013. return *this;
  62014. }
  62015. operator VkTextureLODGatherFormatPropertiesAMD const&() const VULKAN_HPP_NOEXCEPT
  62016. {
  62017. return *reinterpret_cast<const VkTextureLODGatherFormatPropertiesAMD*>( this );
  62018. }
  62019. operator VkTextureLODGatherFormatPropertiesAMD &() VULKAN_HPP_NOEXCEPT
  62020. {
  62021. return *reinterpret_cast<VkTextureLODGatherFormatPropertiesAMD*>( this );
  62022. }
  62023. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  62024. auto operator<=>( TextureLODGatherFormatPropertiesAMD const& ) const = default;
  62025. #else
  62026. bool operator==( TextureLODGatherFormatPropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
  62027. {
  62028. return ( sType == rhs.sType )
  62029. && ( pNext == rhs.pNext )
  62030. && ( supportsTextureGatherLODBiasAMD == rhs.supportsTextureGatherLODBiasAMD );
  62031. }
  62032. bool operator!=( TextureLODGatherFormatPropertiesAMD const& rhs ) const VULKAN_HPP_NOEXCEPT
  62033. {
  62034. return !operator==( rhs );
  62035. }
  62036. #endif
  62037. public:
  62038. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTextureLodGatherFormatPropertiesAMD;
  62039. void* pNext = {};
  62040. VULKAN_HPP_NAMESPACE::Bool32 supportsTextureGatherLODBiasAMD = {};
  62041. };
  62042. static_assert( sizeof( TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ), "struct and wrapper have different size!" );
  62043. static_assert( std::is_standard_layout<TextureLODGatherFormatPropertiesAMD>::value, "struct wrapper is not a standard layout!" );
  62044. template <>
  62045. struct CppType<StructureType, StructureType::eTextureLodGatherFormatPropertiesAMD>
  62046. {
  62047. using Type = TextureLODGatherFormatPropertiesAMD;
  62048. };
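// Illustrative usage sketch (editorial addition): this struct is returned by chaining it onto
// ImageFormatProperties2 when querying a format; physicalDevice and formatInfo are assumed.
//
//   vk::TextureLODGatherFormatPropertiesAMD lodGatherProps;
//   vk::ImageFormatProperties2 props;
//   props.setPNext( &lodGatherProps );
//   vk::Result result = physicalDevice.getImageFormatProperties2( &formatInfo, &props );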
  62049. struct TimelineSemaphoreSubmitInfo
  62050. {
  62051. static const bool allowDuplicate = false;
  62052. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eTimelineSemaphoreSubmitInfo;
  62053. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62054. VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo(uint32_t waitSemaphoreValueCount_ = {}, const uint64_t* pWaitSemaphoreValues_ = {}, uint32_t signalSemaphoreValueCount_ = {}, const uint64_t* pSignalSemaphoreValues_ = {}) VULKAN_HPP_NOEXCEPT
  62055. : waitSemaphoreValueCount( waitSemaphoreValueCount_ ), pWaitSemaphoreValues( pWaitSemaphoreValues_ ), signalSemaphoreValueCount( signalSemaphoreValueCount_ ), pSignalSemaphoreValues( pSignalSemaphoreValues_ )
  62056. {}
  62057. VULKAN_HPP_CONSTEXPR TimelineSemaphoreSubmitInfo( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62058. TimelineSemaphoreSubmitInfo( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  62059. : TimelineSemaphoreSubmitInfo( *reinterpret_cast<TimelineSemaphoreSubmitInfo const *>( &rhs ) )
  62060. {}
  62061. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62062. TimelineSemaphoreSubmitInfo( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ = {} )
  62063. : waitSemaphoreValueCount( static_cast<uint32_t>( waitSemaphoreValues_.size() ) ), pWaitSemaphoreValues( waitSemaphoreValues_.data() ), signalSemaphoreValueCount( static_cast<uint32_t>( signalSemaphoreValues_.size() ) ), pSignalSemaphoreValues( signalSemaphoreValues_.data() )
  62064. {}
  62065. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62066. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62067. VULKAN_HPP_CONSTEXPR_14 TimelineSemaphoreSubmitInfo & operator=( TimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62068. TimelineSemaphoreSubmitInfo & operator=( VkTimelineSemaphoreSubmitInfo const & rhs ) VULKAN_HPP_NOEXCEPT
  62069. {
  62070. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo const *>( &rhs );
  62071. return *this;
  62072. }
  62073. TimelineSemaphoreSubmitInfo & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  62074. {
  62075. pNext = pNext_;
  62076. return *this;
  62077. }
  62078. TimelineSemaphoreSubmitInfo & setWaitSemaphoreValueCount( uint32_t waitSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
  62079. {
  62080. waitSemaphoreValueCount = waitSemaphoreValueCount_;
  62081. return *this;
  62082. }
  62083. TimelineSemaphoreSubmitInfo & setPWaitSemaphoreValues( const uint64_t* pWaitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
  62084. {
  62085. pWaitSemaphoreValues = pWaitSemaphoreValues_;
  62086. return *this;
  62087. }
  62088. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62089. TimelineSemaphoreSubmitInfo & setWaitSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & waitSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
  62090. {
  62091. waitSemaphoreValueCount = static_cast<uint32_t>( waitSemaphoreValues_.size() );
  62092. pWaitSemaphoreValues = waitSemaphoreValues_.data();
  62093. return *this;
  62094. }
  62095. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62096. TimelineSemaphoreSubmitInfo & setSignalSemaphoreValueCount( uint32_t signalSemaphoreValueCount_ ) VULKAN_HPP_NOEXCEPT
  62097. {
  62098. signalSemaphoreValueCount = signalSemaphoreValueCount_;
  62099. return *this;
  62100. }
  62101. TimelineSemaphoreSubmitInfo & setPSignalSemaphoreValues( const uint64_t* pSignalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
  62102. {
  62103. pSignalSemaphoreValues = pSignalSemaphoreValues_;
  62104. return *this;
  62105. }
  62106. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62107. TimelineSemaphoreSubmitInfo & setSignalSemaphoreValues( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & signalSemaphoreValues_ ) VULKAN_HPP_NOEXCEPT
  62108. {
  62109. signalSemaphoreValueCount = static_cast<uint32_t>( signalSemaphoreValues_.size() );
  62110. pSignalSemaphoreValues = signalSemaphoreValues_.data();
  62111. return *this;
  62112. }
  62113. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62114. operator VkTimelineSemaphoreSubmitInfo const&() const VULKAN_HPP_NOEXCEPT
  62115. {
  62116. return *reinterpret_cast<const VkTimelineSemaphoreSubmitInfo*>( this );
  62117. }
  62118. operator VkTimelineSemaphoreSubmitInfo &() VULKAN_HPP_NOEXCEPT
  62119. {
  62120. return *reinterpret_cast<VkTimelineSemaphoreSubmitInfo*>( this );
  62121. }
  62122. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  62123. auto operator<=>( TimelineSemaphoreSubmitInfo const& ) const = default;
  62124. #else
  62125. bool operator==( TimelineSemaphoreSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  62126. {
  62127. return ( sType == rhs.sType )
  62128. && ( pNext == rhs.pNext )
  62129. && ( waitSemaphoreValueCount == rhs.waitSemaphoreValueCount )
  62130. && ( pWaitSemaphoreValues == rhs.pWaitSemaphoreValues )
  62131. && ( signalSemaphoreValueCount == rhs.signalSemaphoreValueCount )
  62132. && ( pSignalSemaphoreValues == rhs.pSignalSemaphoreValues );
  62133. }
  62134. bool operator!=( TimelineSemaphoreSubmitInfo const& rhs ) const VULKAN_HPP_NOEXCEPT
  62135. {
  62136. return !operator==( rhs );
  62137. }
  62138. #endif
  62139. public:
  62140. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eTimelineSemaphoreSubmitInfo;
  62141. const void* pNext = {};
  62142. uint32_t waitSemaphoreValueCount = {};
  62143. const uint64_t* pWaitSemaphoreValues = {};
  62144. uint32_t signalSemaphoreValueCount = {};
  62145. const uint64_t* pSignalSemaphoreValues = {};
  62146. };
  62147. static_assert( sizeof( TimelineSemaphoreSubmitInfo ) == sizeof( VkTimelineSemaphoreSubmitInfo ), "struct and wrapper have different size!" );
  62148. static_assert( std::is_standard_layout<TimelineSemaphoreSubmitInfo>::value, "struct wrapper is not a standard layout!" );
  62149. template <>
  62150. struct CppType<StructureType, StructureType::eTimelineSemaphoreSubmitInfo>
  62151. {
  62152. using Type = TimelineSemaphoreSubmitInfo;
  62153. };
  62154. using TimelineSemaphoreSubmitInfoKHR = TimelineSemaphoreSubmitInfo;
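// Illustrative usage sketch (editorial addition): timeline values for a queue submission are passed
// by chaining this struct onto SubmitInfo; the value counts must match the semaphore counts in that
// SubmitInfo. queue, the timeline semaphores and the command buffer are assumed to exist.
//
//   uint64_t waitValue   = 1;
//   uint64_t signalValue = 2;
//   vk::TimelineSemaphoreSubmitInfo timelineInfo;
//   timelineInfo.setWaitSemaphoreValues( waitValue )       // single-element ArrayProxy
//               .setSignalSemaphoreValues( signalValue );
//   vk::SubmitInfo submitInfo;                              // semaphores / command buffers set elsewhere
//   submitInfo.setPNext( &timelineInfo );
//   queue.submit( submitInfo );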
  62155. struct TraceRaysIndirectCommandKHR
  62156. {
  62157. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62158. VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR(uint32_t width_ = {}, uint32_t height_ = {}, uint32_t depth_ = {}) VULKAN_HPP_NOEXCEPT
  62159. : width( width_ ), height( height_ ), depth( depth_ )
  62160. {}
  62161. VULKAN_HPP_CONSTEXPR TraceRaysIndirectCommandKHR( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62162. TraceRaysIndirectCommandKHR( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  62163. : TraceRaysIndirectCommandKHR( *reinterpret_cast<TraceRaysIndirectCommandKHR const *>( &rhs ) )
  62164. {}
  62165. explicit TraceRaysIndirectCommandKHR( Extent2D const& extent2D, uint32_t depth_ = {} )
  62166. : width( extent2D.width )
  62167. , height( extent2D.height )
  62168. , depth( depth_ )
  62169. {}
  62170. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62171. VULKAN_HPP_CONSTEXPR_14 TraceRaysIndirectCommandKHR & operator=( TraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62172. TraceRaysIndirectCommandKHR & operator=( VkTraceRaysIndirectCommandKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  62173. {
  62174. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR const *>( &rhs );
  62175. return *this;
  62176. }
  62177. TraceRaysIndirectCommandKHR & setWidth( uint32_t width_ ) VULKAN_HPP_NOEXCEPT
  62178. {
  62179. width = width_;
  62180. return *this;
  62181. }
  62182. TraceRaysIndirectCommandKHR & setHeight( uint32_t height_ ) VULKAN_HPP_NOEXCEPT
  62183. {
  62184. height = height_;
  62185. return *this;
  62186. }
  62187. TraceRaysIndirectCommandKHR & setDepth( uint32_t depth_ ) VULKAN_HPP_NOEXCEPT
  62188. {
  62189. depth = depth_;
  62190. return *this;
  62191. }
  62192. operator VkTraceRaysIndirectCommandKHR const&() const VULKAN_HPP_NOEXCEPT
  62193. {
  62194. return *reinterpret_cast<const VkTraceRaysIndirectCommandKHR*>( this );
  62195. }
  62196. operator VkTraceRaysIndirectCommandKHR &() VULKAN_HPP_NOEXCEPT
  62197. {
  62198. return *reinterpret_cast<VkTraceRaysIndirectCommandKHR*>( this );
  62199. }
  62200. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  62201. auto operator<=>( TraceRaysIndirectCommandKHR const& ) const = default;
  62202. #else
  62203. bool operator==( TraceRaysIndirectCommandKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  62204. {
  62205. return ( width == rhs.width )
  62206. && ( height == rhs.height )
  62207. && ( depth == rhs.depth );
  62208. }
  62209. bool operator!=( TraceRaysIndirectCommandKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  62210. {
  62211. return !operator==( rhs );
  62212. }
  62213. #endif
  62214. public:
  62215. uint32_t width = {};
  62216. uint32_t height = {};
  62217. uint32_t depth = {};
  62218. };
  62219. static_assert( sizeof( TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ), "struct and wrapper have different size!" );
  62220. static_assert( std::is_standard_layout<TraceRaysIndirectCommandKHR>::value, "struct wrapper is not a standard layout!" );
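// Illustrative usage sketch (editorial addition): this struct is not passed through the API directly;
// it is written into a device-visible buffer that is later consumed by vkCmdTraceRaysIndirectKHR.
// mappedIndirectBuffer is an assumed host-mapped pointer into that buffer.
//
//   vk::TraceRaysIndirectCommandKHR cmd( 1920, 1080, 1 );   // width, height, depth of the dispatch
//   std::memcpy( mappedIndirectBuffer, &cmd, sizeof( cmd ) );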
  62221. struct ValidationFeaturesEXT
  62222. {
  62223. static const bool allowDuplicate = false;
  62224. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFeaturesEXT;
  62225. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62226. VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT(uint32_t enabledValidationFeatureCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ = {}, uint32_t disabledValidationFeatureCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ = {}) VULKAN_HPP_NOEXCEPT
  62227. : enabledValidationFeatureCount( enabledValidationFeatureCount_ ), pEnabledValidationFeatures( pEnabledValidationFeatures_ ), disabledValidationFeatureCount( disabledValidationFeatureCount_ ), pDisabledValidationFeatures( pDisabledValidationFeatures_ )
  62228. {}
  62229. VULKAN_HPP_CONSTEXPR ValidationFeaturesEXT( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62230. ValidationFeaturesEXT( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  62231. : ValidationFeaturesEXT( *reinterpret_cast<ValidationFeaturesEXT const *>( &rhs ) )
  62232. {}
  62233. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62234. ValidationFeaturesEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const & enabledValidationFeatures_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const & disabledValidationFeatures_ = {} )
  62235. : enabledValidationFeatureCount( static_cast<uint32_t>( enabledValidationFeatures_.size() ) ), pEnabledValidationFeatures( enabledValidationFeatures_.data() ), disabledValidationFeatureCount( static_cast<uint32_t>( disabledValidationFeatures_.size() ) ), pDisabledValidationFeatures( disabledValidationFeatures_.data() )
  62236. {}
  62237. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62238. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62239. VULKAN_HPP_CONSTEXPR_14 ValidationFeaturesEXT & operator=( ValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62240. ValidationFeaturesEXT & operator=( VkValidationFeaturesEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  62241. {
  62242. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT const *>( &rhs );
  62243. return *this;
  62244. }
  62245. ValidationFeaturesEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  62246. {
  62247. pNext = pNext_;
  62248. return *this;
  62249. }
  62250. ValidationFeaturesEXT & setEnabledValidationFeatureCount( uint32_t enabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
  62251. {
  62252. enabledValidationFeatureCount = enabledValidationFeatureCount_;
  62253. return *this;
  62254. }
  62255. ValidationFeaturesEXT & setPEnabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
  62256. {
  62257. pEnabledValidationFeatures = pEnabledValidationFeatures_;
  62258. return *this;
  62259. }
  62260. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62261. ValidationFeaturesEXT & setEnabledValidationFeatures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT> const & enabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
  62262. {
  62263. enabledValidationFeatureCount = static_cast<uint32_t>( enabledValidationFeatures_.size() );
  62264. pEnabledValidationFeatures = enabledValidationFeatures_.data();
  62265. return *this;
  62266. }
  62267. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62268. ValidationFeaturesEXT & setDisabledValidationFeatureCount( uint32_t disabledValidationFeatureCount_ ) VULKAN_HPP_NOEXCEPT
  62269. {
  62270. disabledValidationFeatureCount = disabledValidationFeatureCount_;
  62271. return *this;
  62272. }
  62273. ValidationFeaturesEXT & setPDisabledValidationFeatures( const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
  62274. {
  62275. pDisabledValidationFeatures = pDisabledValidationFeatures_;
  62276. return *this;
  62277. }
  62278. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62279. ValidationFeaturesEXT & setDisabledValidationFeatures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT> const & disabledValidationFeatures_ ) VULKAN_HPP_NOEXCEPT
  62280. {
  62281. disabledValidationFeatureCount = static_cast<uint32_t>( disabledValidationFeatures_.size() );
  62282. pDisabledValidationFeatures = disabledValidationFeatures_.data();
  62283. return *this;
  62284. }
  62285. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62286. operator VkValidationFeaturesEXT const&() const VULKAN_HPP_NOEXCEPT
  62287. {
  62288. return *reinterpret_cast<const VkValidationFeaturesEXT*>( this );
  62289. }
  62290. operator VkValidationFeaturesEXT &() VULKAN_HPP_NOEXCEPT
  62291. {
  62292. return *reinterpret_cast<VkValidationFeaturesEXT*>( this );
  62293. }
  62294. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  62295. auto operator<=>( ValidationFeaturesEXT const& ) const = default;
  62296. #else
  62297. bool operator==( ValidationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  62298. {
  62299. return ( sType == rhs.sType )
  62300. && ( pNext == rhs.pNext )
  62301. && ( enabledValidationFeatureCount == rhs.enabledValidationFeatureCount )
  62302. && ( pEnabledValidationFeatures == rhs.pEnabledValidationFeatures )
  62303. && ( disabledValidationFeatureCount == rhs.disabledValidationFeatureCount )
  62304. && ( pDisabledValidationFeatures == rhs.pDisabledValidationFeatures );
  62305. }
  62306. bool operator!=( ValidationFeaturesEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  62307. {
  62308. return !operator==( rhs );
  62309. }
  62310. #endif
  62311. public:
  62312. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFeaturesEXT;
  62313. const void* pNext = {};
  62314. uint32_t enabledValidationFeatureCount = {};
  62315. const VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT* pEnabledValidationFeatures = {};
  62316. uint32_t disabledValidationFeatureCount = {};
  62317. const VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT* pDisabledValidationFeatures = {};
  62318. };
  62319. static_assert( sizeof( ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ), "struct and wrapper have different size!" );
  62320. static_assert( std::is_standard_layout<ValidationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
  62321. template <>
  62322. struct CppType<StructureType, StructureType::eValidationFeaturesEXT>
  62323. {
  62324. using Type = ValidationFeaturesEXT;
  62325. };
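// Illustrative usage sketch (editorial addition): additional validation features are requested by
// chaining this struct onto InstanceCreateInfo at instance creation; layer and extension setup for
// instanceCreateInfo is assumed elsewhere.
//
//   std::array<vk::ValidationFeatureEnableEXT, 2> enables = {
//     vk::ValidationFeatureEnableEXT::eBestPractices, vk::ValidationFeatureEnableEXT::eGpuAssisted };
//   vk::ValidationFeaturesEXT validationFeatures( enables );
//   instanceCreateInfo.setPNext( &validationFeatures );
//   vk::UniqueInstance instance = vk::createInstanceUnique( instanceCreateInfo );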
  62326. struct ValidationFlagsEXT
  62327. {
  62328. static const bool allowDuplicate = false;
  62329. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eValidationFlagsEXT;
  62330. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62331. VULKAN_HPP_CONSTEXPR ValidationFlagsEXT(uint32_t disabledValidationCheckCount_ = {}, const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks_ = {}) VULKAN_HPP_NOEXCEPT
  62332. : disabledValidationCheckCount( disabledValidationCheckCount_ ), pDisabledValidationChecks( pDisabledValidationChecks_ )
  62333. {}
  62334. VULKAN_HPP_CONSTEXPR ValidationFlagsEXT( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62335. ValidationFlagsEXT( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  62336. : ValidationFlagsEXT( *reinterpret_cast<ValidationFlagsEXT const *>( &rhs ) )
  62337. {}
  62338. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62339. ValidationFlagsEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const & disabledValidationChecks_ )
  62340. : disabledValidationCheckCount( static_cast<uint32_t>( disabledValidationChecks_.size() ) ), pDisabledValidationChecks( disabledValidationChecks_.data() )
  62341. {}
  62342. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62343. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62344. VULKAN_HPP_CONSTEXPR_14 ValidationFlagsEXT & operator=( ValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62345. ValidationFlagsEXT & operator=( VkValidationFlagsEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  62346. {
  62347. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT const *>( &rhs );
  62348. return *this;
  62349. }
  62350. ValidationFlagsEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  62351. {
  62352. pNext = pNext_;
  62353. return *this;
  62354. }
  62355. ValidationFlagsEXT & setDisabledValidationCheckCount( uint32_t disabledValidationCheckCount_ ) VULKAN_HPP_NOEXCEPT
  62356. {
  62357. disabledValidationCheckCount = disabledValidationCheckCount_;
  62358. return *this;
  62359. }
  62360. ValidationFlagsEXT & setPDisabledValidationChecks( const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
  62361. {
  62362. pDisabledValidationChecks = pDisabledValidationChecks_;
  62363. return *this;
  62364. }
  62365. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62366. ValidationFlagsEXT & setDisabledValidationChecks( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::ValidationCheckEXT> const & disabledValidationChecks_ ) VULKAN_HPP_NOEXCEPT
  62367. {
  62368. disabledValidationCheckCount = static_cast<uint32_t>( disabledValidationChecks_.size() );
  62369. pDisabledValidationChecks = disabledValidationChecks_.data();
  62370. return *this;
  62371. }
  62372. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62373. operator VkValidationFlagsEXT const&() const VULKAN_HPP_NOEXCEPT
  62374. {
  62375. return *reinterpret_cast<const VkValidationFlagsEXT*>( this );
  62376. }
  62377. operator VkValidationFlagsEXT &() VULKAN_HPP_NOEXCEPT
  62378. {
  62379. return *reinterpret_cast<VkValidationFlagsEXT*>( this );
  62380. }
  62381. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  62382. auto operator<=>( ValidationFlagsEXT const& ) const = default;
  62383. #else
  62384. bool operator==( ValidationFlagsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  62385. {
  62386. return ( sType == rhs.sType )
  62387. && ( pNext == rhs.pNext )
  62388. && ( disabledValidationCheckCount == rhs.disabledValidationCheckCount )
  62389. && ( pDisabledValidationChecks == rhs.pDisabledValidationChecks );
  62390. }
  62391. bool operator!=( ValidationFlagsEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  62392. {
  62393. return !operator==( rhs );
  62394. }
  62395. #endif
  62396. public:
  62397. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eValidationFlagsEXT;
  62398. const void* pNext = {};
  62399. uint32_t disabledValidationCheckCount = {};
  62400. const VULKAN_HPP_NAMESPACE::ValidationCheckEXT* pDisabledValidationChecks = {};
  62401. };
  62402. static_assert( sizeof( ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
  62403. static_assert( std::is_standard_layout<ValidationFlagsEXT>::value, "struct wrapper is not a standard layout!" );
  62404. template <>
  62405. struct CppType<StructureType, StructureType::eValidationFlagsEXT>
  62406. {
  62407. using Type = ValidationFlagsEXT;
  62408. };
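// Illustrative usage sketch (editorial addition): VK_EXT_validation_flags disables whole validation
// check categories at instance creation; instanceCreateInfo is assumed. Newer code generally prefers
// ValidationFeaturesEXT above.
//
//   vk::ValidationCheckEXT disabledCheck = vk::ValidationCheckEXT::eShaders;
//   vk::ValidationFlagsEXT validationFlags( disabledCheck );   // single-element ArrayProxy
//   instanceCreateInfo.setPNext( &validationFlags );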
  62409. #ifdef VK_USE_PLATFORM_VI_NN
  62410. struct ViSurfaceCreateInfoNN
  62411. {
  62412. static const bool allowDuplicate = false;
  62413. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eViSurfaceCreateInfoNN;
  62414. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62415. VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN(VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ = {}, void* window_ = {}) VULKAN_HPP_NOEXCEPT
  62416. : flags( flags_ ), window( window_ )
  62417. {}
  62418. VULKAN_HPP_CONSTEXPR ViSurfaceCreateInfoNN( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62419. ViSurfaceCreateInfoNN( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
  62420. : ViSurfaceCreateInfoNN( *reinterpret_cast<ViSurfaceCreateInfoNN const *>( &rhs ) )
  62421. {}
  62422. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62423. VULKAN_HPP_CONSTEXPR_14 ViSurfaceCreateInfoNN & operator=( ViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62424. ViSurfaceCreateInfoNN & operator=( VkViSurfaceCreateInfoNN const & rhs ) VULKAN_HPP_NOEXCEPT
  62425. {
  62426. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN const *>( &rhs );
  62427. return *this;
  62428. }
  62429. ViSurfaceCreateInfoNN & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  62430. {
  62431. pNext = pNext_;
  62432. return *this;
  62433. }
  62434. ViSurfaceCreateInfoNN & setFlags( VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags_ ) VULKAN_HPP_NOEXCEPT
  62435. {
  62436. flags = flags_;
  62437. return *this;
  62438. }
  62439. ViSurfaceCreateInfoNN & setWindow( void* window_ ) VULKAN_HPP_NOEXCEPT
  62440. {
  62441. window = window_;
  62442. return *this;
  62443. }
  62444. operator VkViSurfaceCreateInfoNN const&() const VULKAN_HPP_NOEXCEPT
  62445. {
  62446. return *reinterpret_cast<const VkViSurfaceCreateInfoNN*>( this );
  62447. }
  62448. operator VkViSurfaceCreateInfoNN &() VULKAN_HPP_NOEXCEPT
  62449. {
  62450. return *reinterpret_cast<VkViSurfaceCreateInfoNN*>( this );
  62451. }
  62452. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  62453. auto operator<=>( ViSurfaceCreateInfoNN const& ) const = default;
  62454. #else
  62455. bool operator==( ViSurfaceCreateInfoNN const& rhs ) const VULKAN_HPP_NOEXCEPT
  62456. {
  62457. return ( sType == rhs.sType )
  62458. && ( pNext == rhs.pNext )
  62459. && ( flags == rhs.flags )
  62460. && ( window == rhs.window );
  62461. }
  62462. bool operator!=( ViSurfaceCreateInfoNN const& rhs ) const VULKAN_HPP_NOEXCEPT
  62463. {
  62464. return !operator==( rhs );
  62465. }
  62466. #endif
  62467. public:
  62468. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eViSurfaceCreateInfoNN;
  62469. const void* pNext = {};
  62470. VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN flags = {};
  62471. void* window = {};
  62472. };
  62473. static_assert( sizeof( ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ), "struct and wrapper have different size!" );
  62474. static_assert( std::is_standard_layout<ViSurfaceCreateInfoNN>::value, "struct wrapper is not a standard layout!" );
  62475. template <>
  62476. struct CppType<StructureType, StructureType::eViSurfaceCreateInfoNN>
  62477. {
  62478. using Type = ViSurfaceCreateInfoNN;
  62479. };
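// Illustrative usage sketch (editorial addition): a VI surface is created from a native window
// handle; instance and nativeWindow are assumed, and the createViSurfaceNN wrapper is assumed to be
// available under this platform guard.
//
//   vk::ViSurfaceCreateInfoNN createInfo( {}, nativeWindow );
//   vk::SurfaceKHR surface = instance.createViSurfaceNN( createInfo );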
  62480. #endif /*VK_USE_PLATFORM_VI_NN*/
  62481. #ifdef VK_USE_PLATFORM_WAYLAND_KHR
  62482. struct WaylandSurfaceCreateInfoKHR
  62483. {
  62484. static const bool allowDuplicate = false;
  62485. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWaylandSurfaceCreateInfoKHR;
  62486. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62487. VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ = {}, struct wl_display* display_ = {}, struct wl_surface* surface_ = {}) VULKAN_HPP_NOEXCEPT
  62488. : flags( flags_ ), display( display_ ), surface( surface_ )
  62489. {}
  62490. VULKAN_HPP_CONSTEXPR WaylandSurfaceCreateInfoKHR( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62491. WaylandSurfaceCreateInfoKHR( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  62492. : WaylandSurfaceCreateInfoKHR( *reinterpret_cast<WaylandSurfaceCreateInfoKHR const *>( &rhs ) )
  62493. {}
  62494. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62495. VULKAN_HPP_CONSTEXPR_14 WaylandSurfaceCreateInfoKHR & operator=( WaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62496. WaylandSurfaceCreateInfoKHR & operator=( VkWaylandSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  62497. {
  62498. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR const *>( &rhs );
  62499. return *this;
  62500. }
  62501. WaylandSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  62502. {
  62503. pNext = pNext_;
  62504. return *this;
  62505. }
  62506. WaylandSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
  62507. {
  62508. flags = flags_;
  62509. return *this;
  62510. }
  62511. WaylandSurfaceCreateInfoKHR & setDisplay( struct wl_display* display_ ) VULKAN_HPP_NOEXCEPT
  62512. {
  62513. display = display_;
  62514. return *this;
  62515. }
  62516. WaylandSurfaceCreateInfoKHR & setSurface( struct wl_surface* surface_ ) VULKAN_HPP_NOEXCEPT
  62517. {
  62518. surface = surface_;
  62519. return *this;
  62520. }
  62521. operator VkWaylandSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  62522. {
  62523. return *reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR*>( this );
  62524. }
  62525. operator VkWaylandSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
  62526. {
  62527. return *reinterpret_cast<VkWaylandSurfaceCreateInfoKHR*>( this );
  62528. }
  62529. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  62530. auto operator<=>( WaylandSurfaceCreateInfoKHR const& ) const = default;
  62531. #else
  62532. bool operator==( WaylandSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  62533. {
  62534. return ( sType == rhs.sType )
  62535. && ( pNext == rhs.pNext )
  62536. && ( flags == rhs.flags )
  62537. && ( display == rhs.display )
  62538. && ( surface == rhs.surface );
  62539. }
  62540. bool operator!=( WaylandSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  62541. {
  62542. return !operator==( rhs );
  62543. }
  62544. #endif
  62545. public:
  62546. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWaylandSurfaceCreateInfoKHR;
  62547. const void* pNext = {};
  62548. VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR flags = {};
  62549. struct wl_display* display = {};
  62550. struct wl_surface* surface = {};
  62551. };
  62552. static_assert( sizeof( WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
  62553. static_assert( std::is_standard_layout<WaylandSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  62554. template <>
  62555. struct CppType<StructureType, StructureType::eWaylandSurfaceCreateInfoKHR>
  62556. {
  62557. using Type = WaylandSurfaceCreateInfoKHR;
  62558. };
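// Illustrative usage sketch (editorial addition): a Wayland surface is created from wl_display /
// wl_surface handles obtained from the compositor; instance, wlDisplay and wlSurface are assumed.
//
//   vk::WaylandSurfaceCreateInfoKHR createInfo( {}, wlDisplay, wlSurface );
//   vk::SurfaceKHR vkSurface = instance.createWaylandSurfaceKHR( createInfo );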
  62559. #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
  62560. #ifdef VK_USE_PLATFORM_WIN32_KHR
  62561. struct Win32KeyedMutexAcquireReleaseInfoKHR
  62562. {
  62563. static const bool allowDuplicate = false;
  62564. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
  62565. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62566. VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR(uint32_t acquireCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {}, const uint64_t* pAcquireKeys_ = {}, const uint32_t* pAcquireTimeouts_ = {}, uint32_t releaseCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {}, const uint64_t* pReleaseKeys_ = {}) VULKAN_HPP_NOEXCEPT
  62567. : acquireCount( acquireCount_ ), pAcquireSyncs( pAcquireSyncs_ ), pAcquireKeys( pAcquireKeys_ ), pAcquireTimeouts( pAcquireTimeouts_ ), releaseCount( releaseCount_ ), pReleaseSyncs( pReleaseSyncs_ ), pReleaseKeys( pReleaseKeys_ )
  62568. {}
  62569. VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoKHR( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62570. Win32KeyedMutexAcquireReleaseInfoKHR( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  62571. : Win32KeyedMutexAcquireReleaseInfoKHR( *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs ) )
  62572. {}
  62573. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62574. Win32KeyedMutexAcquireReleaseInfoKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ = {} )
  62575. : acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) ), pAcquireSyncs( acquireSyncs_.data() ), pAcquireKeys( acquireKeys_.data() ), pAcquireTimeouts( acquireTimeouts_.data() ), releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) ), pReleaseSyncs( releaseSyncs_.data() ), pReleaseKeys( releaseKeys_.data() )
  62576. {
  62577. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  62578. VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
  62579. VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeouts_.size() );
  62580. VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeouts_.size() );
  62581. #else
  62582. if ( acquireSyncs_.size() != acquireKeys_.size() )
  62583. {
  62584. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireKeys_.size()" );
  62585. }
  62586. if ( acquireSyncs_.size() != acquireTimeouts_.size() )
  62587. {
  62588. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireSyncs_.size() != acquireTimeouts_.size()" );
  62589. }
  62590. if ( acquireKeys_.size() != acquireTimeouts_.size() )
  62591. {
  62592. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: acquireKeys_.size() != acquireTimeouts_.size()" );
  62593. }
  62594. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  62595. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  62596. VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
  62597. #else
  62598. if ( releaseSyncs_.size() != releaseKeys_.size() )
  62599. {
  62600. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoKHR::Win32KeyedMutexAcquireReleaseInfoKHR: releaseSyncs_.size() != releaseKeys_.size()" );
  62601. }
  62602. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  62603. }
  62604. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62605. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62606. VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoKHR & operator=( Win32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62607. Win32KeyedMutexAcquireReleaseInfoKHR & operator=( VkWin32KeyedMutexAcquireReleaseInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  62608. {
  62609. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR const *>( &rhs );
  62610. return *this;
  62611. }
  62612. Win32KeyedMutexAcquireReleaseInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  62613. {
  62614. pNext = pNext_;
  62615. return *this;
  62616. }
  62617. Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
  62618. {
  62619. acquireCount = acquireCount_;
  62620. return *this;
  62621. }
  62622. Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
  62623. {
  62624. pAcquireSyncs = pAcquireSyncs_;
  62625. return *this;
  62626. }
  62627. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62628. Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
  62629. {
  62630. acquireCount = static_cast<uint32_t>( acquireSyncs_.size() );
  62631. pAcquireSyncs = acquireSyncs_.data();
  62632. return *this;
  62633. }
  62634. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62635. Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireKeys( const uint64_t* pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
  62636. {
  62637. pAcquireKeys = pAcquireKeys_;
  62638. return *this;
  62639. }
  62640. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62641. Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
  62642. {
  62643. acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
  62644. pAcquireKeys = acquireKeys_.data();
  62645. return *this;
  62646. }
  62647. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62648. Win32KeyedMutexAcquireReleaseInfoKHR & setPAcquireTimeouts( const uint32_t* pAcquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
  62649. {
  62650. pAcquireTimeouts = pAcquireTimeouts_;
  62651. return *this;
  62652. }
  62653. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62654. Win32KeyedMutexAcquireReleaseInfoKHR & setAcquireTimeouts( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeouts_ ) VULKAN_HPP_NOEXCEPT
  62655. {
  62656. acquireCount = static_cast<uint32_t>( acquireTimeouts_.size() );
  62657. pAcquireTimeouts = acquireTimeouts_.data();
  62658. return *this;
  62659. }
  62660. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62661. Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
  62662. {
  62663. releaseCount = releaseCount_;
  62664. return *this;
  62665. }
  62666. Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
  62667. {
  62668. pReleaseSyncs = pReleaseSyncs_;
  62669. return *this;
  62670. }
  62671. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62672. Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
  62673. {
  62674. releaseCount = static_cast<uint32_t>( releaseSyncs_.size() );
  62675. pReleaseSyncs = releaseSyncs_.data();
  62676. return *this;
  62677. }
  62678. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62679. Win32KeyedMutexAcquireReleaseInfoKHR & setPReleaseKeys( const uint64_t* pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
  62680. {
  62681. pReleaseKeys = pReleaseKeys_;
  62682. return *this;
  62683. }
  62684. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62685. Win32KeyedMutexAcquireReleaseInfoKHR & setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
  62686. {
  62687. releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
  62688. pReleaseKeys = releaseKeys_.data();
  62689. return *this;
  62690. }
  62691. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62692. operator VkWin32KeyedMutexAcquireReleaseInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  62693. {
  62694. return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
  62695. }
  62696. operator VkWin32KeyedMutexAcquireReleaseInfoKHR &() VULKAN_HPP_NOEXCEPT
  62697. {
  62698. return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoKHR*>( this );
  62699. }
  62700. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  62701. auto operator<=>( Win32KeyedMutexAcquireReleaseInfoKHR const& ) const = default;
  62702. #else
  62703. bool operator==( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  62704. {
  62705. return ( sType == rhs.sType )
  62706. && ( pNext == rhs.pNext )
  62707. && ( acquireCount == rhs.acquireCount )
  62708. && ( pAcquireSyncs == rhs.pAcquireSyncs )
  62709. && ( pAcquireKeys == rhs.pAcquireKeys )
  62710. && ( pAcquireTimeouts == rhs.pAcquireTimeouts )
  62711. && ( releaseCount == rhs.releaseCount )
  62712. && ( pReleaseSyncs == rhs.pReleaseSyncs )
  62713. && ( pReleaseKeys == rhs.pReleaseKeys );
  62714. }
  62715. bool operator!=( Win32KeyedMutexAcquireReleaseInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  62716. {
  62717. return !operator==( rhs );
  62718. }
  62719. #endif
  62720. public:
  62721. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR;
  62722. const void* pNext = {};
  62723. uint32_t acquireCount = {};
  62724. const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs = {};
  62725. const uint64_t* pAcquireKeys = {};
  62726. const uint32_t* pAcquireTimeouts = {};
  62727. uint32_t releaseCount = {};
  62728. const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs = {};
  62729. const uint64_t* pReleaseKeys = {};
  62730. };
  62731. static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ), "struct and wrapper have different size!" );
  62732. static_assert( std::is_standard_layout<Win32KeyedMutexAcquireReleaseInfoKHR>::value, "struct wrapper is not a standard layout!" );
  62733. template <>
  62734. struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoKHR>
  62735. {
  62736. using Type = Win32KeyedMutexAcquireReleaseInfoKHR;
  62737. };
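// Usage sketch (editorial, not generator output; assumes the default "vk" namespace and the
// exceptions-enabled, enhanced-mode configuration): Win32KeyedMutexAcquireReleaseInfoKHR extends
// VkSubmitInfo, so it is chained through pNext to wrap a submission in a D3D keyed-mutex
// acquire/release. queue, commandBuffer and importedMemory are application-supplied placeholders;
// importedMemory is assumed to be memory imported from a keyed-mutex shared resource.
//
//   uint64_t acquireKey = 0;
//   uint64_t releaseKey = 0;
//   uint32_t timeoutMs  = 5000;
//   vk::Win32KeyedMutexAcquireReleaseInfoKHR keyedMutexInfo( 1, &importedMemory, &acquireKey, &timeoutMs, 1, &importedMemory, &releaseKey );
//   vk::SubmitInfo submitInfo{};
//   submitInfo.setCommandBufferCount( 1 ).setPCommandBuffers( &commandBuffer ).setPNext( &keyedMutexInfo );
//   queue.submit( submitInfo, nullptr );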
  62738. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  62739. #ifdef VK_USE_PLATFORM_WIN32_KHR
  62740. struct Win32KeyedMutexAcquireReleaseInfoNV
  62741. {
  62742. static const bool allowDuplicate = false;
  62743. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
  62744. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62745. VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV(uint32_t acquireCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ = {}, const uint64_t* pAcquireKeys_ = {}, const uint32_t* pAcquireTimeoutMilliseconds_ = {}, uint32_t releaseCount_ = {}, const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ = {}, const uint64_t* pReleaseKeys_ = {}) VULKAN_HPP_NOEXCEPT
  62746. : acquireCount( acquireCount_ ), pAcquireSyncs( pAcquireSyncs_ ), pAcquireKeys( pAcquireKeys_ ), pAcquireTimeoutMilliseconds( pAcquireTimeoutMilliseconds_ ), releaseCount( releaseCount_ ), pReleaseSyncs( pReleaseSyncs_ ), pReleaseKeys( pReleaseKeys_ )
  62747. {}
  62748. VULKAN_HPP_CONSTEXPR Win32KeyedMutexAcquireReleaseInfoNV( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62749. Win32KeyedMutexAcquireReleaseInfoNV( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  62750. : Win32KeyedMutexAcquireReleaseInfoNV( *reinterpret_cast<Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs ) )
  62751. {}
  62752. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62753. Win32KeyedMutexAcquireReleaseInfoNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ = {}, VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ = {} )
  62754. : acquireCount( static_cast<uint32_t>( acquireSyncs_.size() ) ), pAcquireSyncs( acquireSyncs_.data() ), pAcquireKeys( acquireKeys_.data() ), pAcquireTimeoutMilliseconds( acquireTimeoutMilliseconds_.data() ), releaseCount( static_cast<uint32_t>( releaseSyncs_.size() ) ), pReleaseSyncs( releaseSyncs_.data() ), pReleaseKeys( releaseKeys_.data() )
  62755. {
  62756. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  62757. VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireKeys_.size() );
  62758. VULKAN_HPP_ASSERT( acquireSyncs_.size() == acquireTimeoutMilliseconds_.size() );
  62759. VULKAN_HPP_ASSERT( acquireKeys_.size() == acquireTimeoutMilliseconds_.size() );
  62760. #else
  62761. if ( acquireSyncs_.size() != acquireKeys_.size() )
  62762. {
  62763. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireKeys_.size()" );
  62764. }
  62765. if ( acquireSyncs_.size() != acquireTimeoutMilliseconds_.size() )
  62766. {
  62767. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireSyncs_.size() != acquireTimeoutMilliseconds_.size()" );
  62768. }
  62769. if ( acquireKeys_.size() != acquireTimeoutMilliseconds_.size() )
  62770. {
  62771. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: acquireKeys_.size() != acquireTimeoutMilliseconds_.size()" );
  62772. }
  62773. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  62774. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  62775. VULKAN_HPP_ASSERT( releaseSyncs_.size() == releaseKeys_.size() );
  62776. #else
  62777. if ( releaseSyncs_.size() != releaseKeys_.size() )
  62778. {
  62779. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Win32KeyedMutexAcquireReleaseInfoNV::Win32KeyedMutexAcquireReleaseInfoNV: releaseSyncs_.size() != releaseKeys_.size()" );
  62780. }
  62781. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  62782. }
  62783. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62784. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62785. VULKAN_HPP_CONSTEXPR_14 Win32KeyedMutexAcquireReleaseInfoNV & operator=( Win32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62786. Win32KeyedMutexAcquireReleaseInfoNV & operator=( VkWin32KeyedMutexAcquireReleaseInfoNV const & rhs ) VULKAN_HPP_NOEXCEPT
  62787. {
  62788. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV const *>( &rhs );
  62789. return *this;
  62790. }
  62791. Win32KeyedMutexAcquireReleaseInfoNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  62792. {
  62793. pNext = pNext_;
  62794. return *this;
  62795. }
  62796. Win32KeyedMutexAcquireReleaseInfoNV & setAcquireCount( uint32_t acquireCount_ ) VULKAN_HPP_NOEXCEPT
  62797. {
  62798. acquireCount = acquireCount_;
  62799. return *this;
  62800. }
  62801. Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs_ ) VULKAN_HPP_NOEXCEPT
  62802. {
  62803. pAcquireSyncs = pAcquireSyncs_;
  62804. return *this;
  62805. }
  62806. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62807. Win32KeyedMutexAcquireReleaseInfoNV & setAcquireSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & acquireSyncs_ ) VULKAN_HPP_NOEXCEPT
  62808. {
  62809. acquireCount = static_cast<uint32_t>( acquireSyncs_.size() );
  62810. pAcquireSyncs = acquireSyncs_.data();
  62811. return *this;
  62812. }
  62813. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62814. Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireKeys( const uint64_t* pAcquireKeys_ ) VULKAN_HPP_NOEXCEPT
  62815. {
  62816. pAcquireKeys = pAcquireKeys_;
  62817. return *this;
  62818. }
  62819. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62820. Win32KeyedMutexAcquireReleaseInfoNV & setAcquireKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & acquireKeys_ ) VULKAN_HPP_NOEXCEPT
  62821. {
  62822. acquireCount = static_cast<uint32_t>( acquireKeys_.size() );
  62823. pAcquireKeys = acquireKeys_.data();
  62824. return *this;
  62825. }
  62826. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62827. Win32KeyedMutexAcquireReleaseInfoNV & setPAcquireTimeoutMilliseconds( const uint32_t* pAcquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
  62828. {
  62829. pAcquireTimeoutMilliseconds = pAcquireTimeoutMilliseconds_;
  62830. return *this;
  62831. }
  62832. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62833. Win32KeyedMutexAcquireReleaseInfoNV & setAcquireTimeoutMilliseconds( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint32_t> const & acquireTimeoutMilliseconds_ ) VULKAN_HPP_NOEXCEPT
  62834. {
  62835. acquireCount = static_cast<uint32_t>( acquireTimeoutMilliseconds_.size() );
  62836. pAcquireTimeoutMilliseconds = acquireTimeoutMilliseconds_.data();
  62837. return *this;
  62838. }
  62839. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62840. Win32KeyedMutexAcquireReleaseInfoNV & setReleaseCount( uint32_t releaseCount_ ) VULKAN_HPP_NOEXCEPT
  62841. {
  62842. releaseCount = releaseCount_;
  62843. return *this;
  62844. }
  62845. Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseSyncs( const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs_ ) VULKAN_HPP_NOEXCEPT
  62846. {
  62847. pReleaseSyncs = pReleaseSyncs_;
  62848. return *this;
  62849. }
  62850. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62851. Win32KeyedMutexAcquireReleaseInfoNV & setReleaseSyncs( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::DeviceMemory> const & releaseSyncs_ ) VULKAN_HPP_NOEXCEPT
  62852. {
  62853. releaseCount = static_cast<uint32_t>( releaseSyncs_.size() );
  62854. pReleaseSyncs = releaseSyncs_.data();
  62855. return *this;
  62856. }
  62857. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62858. Win32KeyedMutexAcquireReleaseInfoNV & setPReleaseKeys( const uint64_t* pReleaseKeys_ ) VULKAN_HPP_NOEXCEPT
  62859. {
  62860. pReleaseKeys = pReleaseKeys_;
  62861. return *this;
  62862. }
  62863. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62864. Win32KeyedMutexAcquireReleaseInfoNV & setReleaseKeys( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const uint64_t> const & releaseKeys_ ) VULKAN_HPP_NOEXCEPT
  62865. {
  62866. releaseCount = static_cast<uint32_t>( releaseKeys_.size() );
  62867. pReleaseKeys = releaseKeys_.data();
  62868. return *this;
  62869. }
  62870. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  62871. operator VkWin32KeyedMutexAcquireReleaseInfoNV const&() const VULKAN_HPP_NOEXCEPT
  62872. {
  62873. return *reinterpret_cast<const VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
  62874. }
  62875. operator VkWin32KeyedMutexAcquireReleaseInfoNV &() VULKAN_HPP_NOEXCEPT
  62876. {
  62877. return *reinterpret_cast<VkWin32KeyedMutexAcquireReleaseInfoNV*>( this );
  62878. }
  62879. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  62880. auto operator<=>( Win32KeyedMutexAcquireReleaseInfoNV const& ) const = default;
  62881. #else
  62882. bool operator==( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  62883. {
  62884. return ( sType == rhs.sType )
  62885. && ( pNext == rhs.pNext )
  62886. && ( acquireCount == rhs.acquireCount )
  62887. && ( pAcquireSyncs == rhs.pAcquireSyncs )
  62888. && ( pAcquireKeys == rhs.pAcquireKeys )
  62889. && ( pAcquireTimeoutMilliseconds == rhs.pAcquireTimeoutMilliseconds )
  62890. && ( releaseCount == rhs.releaseCount )
  62891. && ( pReleaseSyncs == rhs.pReleaseSyncs )
  62892. && ( pReleaseKeys == rhs.pReleaseKeys );
  62893. }
  62894. bool operator!=( Win32KeyedMutexAcquireReleaseInfoNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  62895. {
  62896. return !operator==( rhs );
  62897. }
  62898. #endif
  62899. public:
  62900. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32KeyedMutexAcquireReleaseInfoNV;
  62901. const void* pNext = {};
  62902. uint32_t acquireCount = {};
  62903. const VULKAN_HPP_NAMESPACE::DeviceMemory* pAcquireSyncs = {};
  62904. const uint64_t* pAcquireKeys = {};
  62905. const uint32_t* pAcquireTimeoutMilliseconds = {};
  62906. uint32_t releaseCount = {};
  62907. const VULKAN_HPP_NAMESPACE::DeviceMemory* pReleaseSyncs = {};
  62908. const uint64_t* pReleaseKeys = {};
  62909. };
  62910. static_assert( sizeof( Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ), "struct and wrapper have different size!" );
  62911. static_assert( std::is_standard_layout<Win32KeyedMutexAcquireReleaseInfoNV>::value, "struct wrapper is not a standard layout!" );
  62912. template <>
  62913. struct CppType<StructureType, StructureType::eWin32KeyedMutexAcquireReleaseInfoNV>
  62914. {
  62915. using Type = Win32KeyedMutexAcquireReleaseInfoNV;
  62916. };
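// Note (editorial): Win32KeyedMutexAcquireReleaseInfoNV above mirrors Win32KeyedMutexAcquireReleaseInfoKHR;
// apart from sType, the only structural difference is the member name pAcquireTimeoutMilliseconds
// (the KHR struct uses pAcquireTimeouts). It is provided by VK_NV_win32_keyed_mutex rather than
// VK_KHR_win32_keyed_mutex and is used the same way, chained into a SubmitInfo through pNext.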
  62917. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  62918. #ifdef VK_USE_PLATFORM_WIN32_KHR
  62919. struct Win32SurfaceCreateInfoKHR
  62920. {
  62921. static const bool allowDuplicate = false;
  62922. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWin32SurfaceCreateInfoKHR;
  62923. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62924. VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ = {}, HINSTANCE hinstance_ = {}, HWND hwnd_ = {}) VULKAN_HPP_NOEXCEPT
  62925. : flags( flags_ ), hinstance( hinstance_ ), hwnd( hwnd_ )
  62926. {}
  62927. VULKAN_HPP_CONSTEXPR Win32SurfaceCreateInfoKHR( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62928. Win32SurfaceCreateInfoKHR( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  62929. : Win32SurfaceCreateInfoKHR( *reinterpret_cast<Win32SurfaceCreateInfoKHR const *>( &rhs ) )
  62930. {}
  62931. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  62932. VULKAN_HPP_CONSTEXPR_14 Win32SurfaceCreateInfoKHR & operator=( Win32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  62933. Win32SurfaceCreateInfoKHR & operator=( VkWin32SurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  62934. {
  62935. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR const *>( &rhs );
  62936. return *this;
  62937. }
  62938. Win32SurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  62939. {
  62940. pNext = pNext_;
  62941. return *this;
  62942. }
  62943. Win32SurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
  62944. {
  62945. flags = flags_;
  62946. return *this;
  62947. }
  62948. Win32SurfaceCreateInfoKHR & setHinstance( HINSTANCE hinstance_ ) VULKAN_HPP_NOEXCEPT
  62949. {
  62950. hinstance = hinstance_;
  62951. return *this;
  62952. }
  62953. Win32SurfaceCreateInfoKHR & setHwnd( HWND hwnd_ ) VULKAN_HPP_NOEXCEPT
  62954. {
  62955. hwnd = hwnd_;
  62956. return *this;
  62957. }
  62958. operator VkWin32SurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  62959. {
  62960. return *reinterpret_cast<const VkWin32SurfaceCreateInfoKHR*>( this );
  62961. }
  62962. operator VkWin32SurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
  62963. {
  62964. return *reinterpret_cast<VkWin32SurfaceCreateInfoKHR*>( this );
  62965. }
  62966. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  62967. auto operator<=>( Win32SurfaceCreateInfoKHR const& ) const = default;
  62968. #else
  62969. bool operator==( Win32SurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  62970. {
  62971. return ( sType == rhs.sType )
  62972. && ( pNext == rhs.pNext )
  62973. && ( flags == rhs.flags )
  62974. && ( hinstance == rhs.hinstance )
  62975. && ( hwnd == rhs.hwnd );
  62976. }
  62977. bool operator!=( Win32SurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  62978. {
  62979. return !operator==( rhs );
  62980. }
  62981. #endif
  62982. public:
  62983. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWin32SurfaceCreateInfoKHR;
  62984. const void* pNext = {};
  62985. VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR flags = {};
  62986. HINSTANCE hinstance = {};
  62987. HWND hwnd = {};
  62988. };
  62989. static_assert( sizeof( Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
  62990. static_assert( std::is_standard_layout<Win32SurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  62991. template <>
  62992. struct CppType<StructureType, StructureType::eWin32SurfaceCreateInfoKHR>
  62993. {
  62994. using Type = Win32SurfaceCreateInfoKHR;
  62995. };
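// Usage sketch (editorial, not generator output): creating a Win32 presentation surface from an
// application window. hinstance and hwnd are assumed to come from the Win32 window setup.
//
//   vk::SurfaceKHR createWin32Surface( vk::Instance instance, HINSTANCE hinstance, HWND hwnd )
//   {
//     vk::Win32SurfaceCreateInfoKHR createInfo( {}, hinstance, hwnd );
//     return instance.createWin32SurfaceKHR( createInfo );
//   }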
  62996. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  62997. struct WriteDescriptorSetAccelerationStructureKHR
  62998. {
  62999. static const bool allowDuplicate = false;
  63000. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
  63001. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  63002. VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR(uint32_t accelerationStructureCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures_ = {}) VULKAN_HPP_NOEXCEPT
  63003. : accelerationStructureCount( accelerationStructureCount_ ), pAccelerationStructures( pAccelerationStructures_ )
  63004. {}
  63005. VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureKHR( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  63006. WriteDescriptorSetAccelerationStructureKHR( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  63007. : WriteDescriptorSetAccelerationStructureKHR( *reinterpret_cast<WriteDescriptorSetAccelerationStructureKHR const *>( &rhs ) )
  63008. {}
  63009. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  63010. WriteDescriptorSetAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures_ )
  63011. : accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) ), pAccelerationStructures( accelerationStructures_.data() )
  63012. {}
  63013. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  63014. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  63015. VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureKHR & operator=( WriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  63016. WriteDescriptorSetAccelerationStructureKHR & operator=( VkWriteDescriptorSetAccelerationStructureKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  63017. {
  63018. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR const *>( &rhs );
  63019. return *this;
  63020. }
  63021. WriteDescriptorSetAccelerationStructureKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  63022. {
  63023. pNext = pNext_;
  63024. return *this;
  63025. }
  63026. WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
  63027. {
  63028. accelerationStructureCount = accelerationStructureCount_;
  63029. return *this;
  63030. }
  63031. WriteDescriptorSetAccelerationStructureKHR & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
  63032. {
  63033. pAccelerationStructures = pAccelerationStructures_;
  63034. return *this;
  63035. }
  63036. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  63037. WriteDescriptorSetAccelerationStructureKHR & setAccelerationStructures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
  63038. {
  63039. accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
  63040. pAccelerationStructures = accelerationStructures_.data();
  63041. return *this;
  63042. }
  63043. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  63044. operator VkWriteDescriptorSetAccelerationStructureKHR const&() const VULKAN_HPP_NOEXCEPT
  63045. {
  63046. return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureKHR*>( this );
  63047. }
  63048. operator VkWriteDescriptorSetAccelerationStructureKHR &() VULKAN_HPP_NOEXCEPT
  63049. {
  63050. return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureKHR*>( this );
  63051. }
  63052. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  63053. auto operator<=>( WriteDescriptorSetAccelerationStructureKHR const& ) const = default;
  63054. #else
  63055. bool operator==( WriteDescriptorSetAccelerationStructureKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  63056. {
  63057. return ( sType == rhs.sType )
  63058. && ( pNext == rhs.pNext )
  63059. && ( accelerationStructureCount == rhs.accelerationStructureCount )
  63060. && ( pAccelerationStructures == rhs.pAccelerationStructures );
  63061. }
  63062. bool operator!=( WriteDescriptorSetAccelerationStructureKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  63063. {
  63064. return !operator==( rhs );
  63065. }
  63066. #endif
  63067. public:
  63068. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureKHR;
  63069. const void* pNext = {};
  63070. uint32_t accelerationStructureCount = {};
  63071. const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures = {};
  63072. };
  63073. static_assert( sizeof( WriteDescriptorSetAccelerationStructureKHR ) == sizeof( VkWriteDescriptorSetAccelerationStructureKHR ), "struct and wrapper have different size!" );
  63074. static_assert( std::is_standard_layout<WriteDescriptorSetAccelerationStructureKHR>::value, "struct wrapper is not a standard layout!" );
  63075. template <>
  63076. struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureKHR>
  63077. {
  63078. using Type = WriteDescriptorSetAccelerationStructureKHR;
  63079. };
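// Usage sketch (editorial, not generator output; enhanced-mode reference overloads assumed):
// writing an acceleration structure into a descriptor set. The struct carries the handles and is
// chained into a WriteDescriptorSet through pNext; descriptorCount must equal
// accelerationStructureCount. descriptorSet, binding, tlas and device are application-supplied
// placeholders.
//
//   vk::WriteDescriptorSetAccelerationStructureKHR asInfo( 1, &tlas );
//   vk::WriteDescriptorSet write{};
//   write.setDstSet( descriptorSet )
//        .setDstBinding( binding )
//        .setDescriptorCount( 1 )
//        .setDescriptorType( vk::DescriptorType::eAccelerationStructureKHR )
//        .setPNext( &asInfo );
//   device.updateDescriptorSets( write, {} );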
  63080. struct WriteDescriptorSetAccelerationStructureNV
  63081. {
  63082. static const bool allowDuplicate = false;
  63083. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
  63084. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  63085. VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV(uint32_t accelerationStructureCount_ = {}, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures_ = {}) VULKAN_HPP_NOEXCEPT
  63086. : accelerationStructureCount( accelerationStructureCount_ ), pAccelerationStructures( pAccelerationStructures_ )
  63087. {}
  63088. VULKAN_HPP_CONSTEXPR WriteDescriptorSetAccelerationStructureNV( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  63089. WriteDescriptorSetAccelerationStructureNV( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
  63090. : WriteDescriptorSetAccelerationStructureNV( *reinterpret_cast<WriteDescriptorSetAccelerationStructureNV const *>( &rhs ) )
  63091. {}
  63092. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  63093. WriteDescriptorSetAccelerationStructureNV( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures_ )
  63094. : accelerationStructureCount( static_cast<uint32_t>( accelerationStructures_.size() ) ), pAccelerationStructures( accelerationStructures_.data() )
  63095. {}
  63096. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  63097. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  63098. VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetAccelerationStructureNV & operator=( WriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  63099. WriteDescriptorSetAccelerationStructureNV & operator=( VkWriteDescriptorSetAccelerationStructureNV const & rhs ) VULKAN_HPP_NOEXCEPT
  63100. {
  63101. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV const *>( &rhs );
  63102. return *this;
  63103. }
  63104. WriteDescriptorSetAccelerationStructureNV & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  63105. {
  63106. pNext = pNext_;
  63107. return *this;
  63108. }
  63109. WriteDescriptorSetAccelerationStructureNV & setAccelerationStructureCount( uint32_t accelerationStructureCount_ ) VULKAN_HPP_NOEXCEPT
  63110. {
  63111. accelerationStructureCount = accelerationStructureCount_;
  63112. return *this;
  63113. }
  63114. WriteDescriptorSetAccelerationStructureNV & setPAccelerationStructures( const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures_ ) VULKAN_HPP_NOEXCEPT
  63115. {
  63116. pAccelerationStructures = pAccelerationStructures_;
  63117. return *this;
  63118. }
  63119. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  63120. WriteDescriptorSetAccelerationStructureNV & setAccelerationStructures( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures_ ) VULKAN_HPP_NOEXCEPT
  63121. {
  63122. accelerationStructureCount = static_cast<uint32_t>( accelerationStructures_.size() );
  63123. pAccelerationStructures = accelerationStructures_.data();
  63124. return *this;
  63125. }
  63126. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  63127. operator VkWriteDescriptorSetAccelerationStructureNV const&() const VULKAN_HPP_NOEXCEPT
  63128. {
  63129. return *reinterpret_cast<const VkWriteDescriptorSetAccelerationStructureNV*>( this );
  63130. }
  63131. operator VkWriteDescriptorSetAccelerationStructureNV &() VULKAN_HPP_NOEXCEPT
  63132. {
  63133. return *reinterpret_cast<VkWriteDescriptorSetAccelerationStructureNV*>( this );
  63134. }
  63135. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  63136. auto operator<=>( WriteDescriptorSetAccelerationStructureNV const& ) const = default;
  63137. #else
  63138. bool operator==( WriteDescriptorSetAccelerationStructureNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  63139. {
  63140. return ( sType == rhs.sType )
  63141. && ( pNext == rhs.pNext )
  63142. && ( accelerationStructureCount == rhs.accelerationStructureCount )
  63143. && ( pAccelerationStructures == rhs.pAccelerationStructures );
  63144. }
  63145. bool operator!=( WriteDescriptorSetAccelerationStructureNV const& rhs ) const VULKAN_HPP_NOEXCEPT
  63146. {
  63147. return !operator==( rhs );
  63148. }
  63149. #endif
  63150. public:
  63151. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetAccelerationStructureNV;
  63152. const void* pNext = {};
  63153. uint32_t accelerationStructureCount = {};
  63154. const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures = {};
  63155. };
  63156. static_assert( sizeof( WriteDescriptorSetAccelerationStructureNV ) == sizeof( VkWriteDescriptorSetAccelerationStructureNV ), "struct and wrapper have different size!" );
  63157. static_assert( std::is_standard_layout<WriteDescriptorSetAccelerationStructureNV>::value, "struct wrapper is not a standard layout!" );
  63158. template <>
  63159. struct CppType<StructureType, StructureType::eWriteDescriptorSetAccelerationStructureNV>
  63160. {
  63161. using Type = WriteDescriptorSetAccelerationStructureNV;
  63162. };
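// Note (editorial): WriteDescriptorSetAccelerationStructureNV above is the VK_NV_ray_tracing
// counterpart of the KHR struct; it is used the same way but carries AccelerationStructureNV
// handles and pairs with DescriptorType::eAccelerationStructureNV in the parent WriteDescriptorSet.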
  63163. struct WriteDescriptorSetInlineUniformBlockEXT
  63164. {
  63165. static const bool allowDuplicate = false;
  63166. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT;
  63167. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  63168. VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT(uint32_t dataSize_ = {}, const void* pData_ = {}) VULKAN_HPP_NOEXCEPT
  63169. : dataSize( dataSize_ ), pData( pData_ )
  63170. {}
  63171. VULKAN_HPP_CONSTEXPR WriteDescriptorSetInlineUniformBlockEXT( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  63172. WriteDescriptorSetInlineUniformBlockEXT( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  63173. : WriteDescriptorSetInlineUniformBlockEXT( *reinterpret_cast<WriteDescriptorSetInlineUniformBlockEXT const *>( &rhs ) )
  63174. {}
  63175. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  63176. template <typename T>
  63177. WriteDescriptorSetInlineUniformBlockEXT( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ )
  63178. : dataSize( static_cast<uint32_t>( data_.size() * sizeof(T) ) ), pData( data_.data() )
  63179. {}
  63180. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  63181. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  63182. VULKAN_HPP_CONSTEXPR_14 WriteDescriptorSetInlineUniformBlockEXT & operator=( WriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  63183. WriteDescriptorSetInlineUniformBlockEXT & operator=( VkWriteDescriptorSetInlineUniformBlockEXT const & rhs ) VULKAN_HPP_NOEXCEPT
  63184. {
  63185. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT const *>( &rhs );
  63186. return *this;
  63187. }
  63188. WriteDescriptorSetInlineUniformBlockEXT & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  63189. {
  63190. pNext = pNext_;
  63191. return *this;
  63192. }
  63193. WriteDescriptorSetInlineUniformBlockEXT & setDataSize( uint32_t dataSize_ ) VULKAN_HPP_NOEXCEPT
  63194. {
  63195. dataSize = dataSize_;
  63196. return *this;
  63197. }
  63198. WriteDescriptorSetInlineUniformBlockEXT & setPData( const void* pData_ ) VULKAN_HPP_NOEXCEPT
  63199. {
  63200. pData = pData_;
  63201. return *this;
  63202. }
  63203. #if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  63204. template <typename T>
  63205. WriteDescriptorSetInlineUniformBlockEXT & setData( VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries<const T> const & data_ ) VULKAN_HPP_NOEXCEPT
  63206. {
  63207. dataSize = static_cast<uint32_t>( data_.size() * sizeof(T) );
  63208. pData = data_.data();
  63209. return *this;
  63210. }
  63211. #endif // !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
  63212. operator VkWriteDescriptorSetInlineUniformBlockEXT const&() const VULKAN_HPP_NOEXCEPT
  63213. {
  63214. return *reinterpret_cast<const VkWriteDescriptorSetInlineUniformBlockEXT*>( this );
  63215. }
  63216. operator VkWriteDescriptorSetInlineUniformBlockEXT &() VULKAN_HPP_NOEXCEPT
  63217. {
  63218. return *reinterpret_cast<VkWriteDescriptorSetInlineUniformBlockEXT*>( this );
  63219. }
  63220. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  63221. auto operator<=>( WriteDescriptorSetInlineUniformBlockEXT const& ) const = default;
  63222. #else
  63223. bool operator==( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  63224. {
  63225. return ( sType == rhs.sType )
  63226. && ( pNext == rhs.pNext )
  63227. && ( dataSize == rhs.dataSize )
  63228. && ( pData == rhs.pData );
  63229. }
  63230. bool operator!=( WriteDescriptorSetInlineUniformBlockEXT const& rhs ) const VULKAN_HPP_NOEXCEPT
  63231. {
  63232. return !operator==( rhs );
  63233. }
  63234. #endif
  63235. public:
  63236. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eWriteDescriptorSetInlineUniformBlockEXT;
  63237. const void* pNext = {};
  63238. uint32_t dataSize = {};
  63239. const void* pData = {};
  63240. };
  63241. static_assert( sizeof( WriteDescriptorSetInlineUniformBlockEXT ) == sizeof( VkWriteDescriptorSetInlineUniformBlockEXT ), "struct and wrapper have different size!" );
  63242. static_assert( std::is_standard_layout<WriteDescriptorSetInlineUniformBlockEXT>::value, "struct wrapper is not a standard layout!" );
  63243. template <>
  63244. struct CppType<StructureType, StructureType::eWriteDescriptorSetInlineUniformBlockEXT>
  63245. {
  63246. using Type = WriteDescriptorSetInlineUniformBlockEXT;
  63247. };
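// Usage sketch (editorial, not generator output; enhanced mode assumed): updating an inline
// uniform block binding. The templated setData helper derives dataSize in bytes from the element
// count; the parent WriteDescriptorSet uses DescriptorType::eInlineUniformBlockEXT and a
// descriptorCount equal to that byte size. descriptorSet, binding and device are
// application-supplied placeholders.
//
//   std::array<float, 4> constants = { 0.0f, 0.25f, 0.5f, 1.0f };
//   vk::WriteDescriptorSetInlineUniformBlockEXT blockInfo;
//   blockInfo.setData<float>( constants );    // dataSize = 16 bytes
//   vk::WriteDescriptorSet write{};
//   write.setDstSet( descriptorSet )
//        .setDstBinding( binding )
//        .setDescriptorCount( blockInfo.dataSize )
//        .setDescriptorType( vk::DescriptorType::eInlineUniformBlockEXT )
//        .setPNext( &blockInfo );
//   device.updateDescriptorSets( write, {} );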
  63248. #ifdef VK_USE_PLATFORM_XCB_KHR
  63249. struct XcbSurfaceCreateInfoKHR
  63250. {
  63251. static const bool allowDuplicate = false;
  63252. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXcbSurfaceCreateInfoKHR;
  63253. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  63254. VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ = {}, xcb_connection_t* connection_ = {}, xcb_window_t window_ = {}) VULKAN_HPP_NOEXCEPT
  63255. : flags( flags_ ), connection( connection_ ), window( window_ )
  63256. {}
  63257. VULKAN_HPP_CONSTEXPR XcbSurfaceCreateInfoKHR( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  63258. XcbSurfaceCreateInfoKHR( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  63259. : XcbSurfaceCreateInfoKHR( *reinterpret_cast<XcbSurfaceCreateInfoKHR const *>( &rhs ) )
  63260. {}
  63261. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  63262. VULKAN_HPP_CONSTEXPR_14 XcbSurfaceCreateInfoKHR & operator=( XcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  63263. XcbSurfaceCreateInfoKHR & operator=( VkXcbSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  63264. {
  63265. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR const *>( &rhs );
  63266. return *this;
  63267. }
  63268. XcbSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  63269. {
  63270. pNext = pNext_;
  63271. return *this;
  63272. }
  63273. XcbSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
  63274. {
  63275. flags = flags_;
  63276. return *this;
  63277. }
  63278. XcbSurfaceCreateInfoKHR & setConnection( xcb_connection_t* connection_ ) VULKAN_HPP_NOEXCEPT
  63279. {
  63280. connection = connection_;
  63281. return *this;
  63282. }
  63283. XcbSurfaceCreateInfoKHR & setWindow( xcb_window_t window_ ) VULKAN_HPP_NOEXCEPT
  63284. {
  63285. window = window_;
  63286. return *this;
  63287. }
  63288. operator VkXcbSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  63289. {
  63290. return *reinterpret_cast<const VkXcbSurfaceCreateInfoKHR*>( this );
  63291. }
  63292. operator VkXcbSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
  63293. {
  63294. return *reinterpret_cast<VkXcbSurfaceCreateInfoKHR*>( this );
  63295. }
  63296. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  63297. auto operator<=>( XcbSurfaceCreateInfoKHR const& ) const = default;
  63298. #else
  63299. bool operator==( XcbSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  63300. {
  63301. return ( sType == rhs.sType )
  63302. && ( pNext == rhs.pNext )
  63303. && ( flags == rhs.flags )
  63304. && ( connection == rhs.connection )
  63305. && ( memcmp( &window, &rhs.window, sizeof( xcb_window_t ) ) == 0 );
  63306. }
  63307. bool operator!=( XcbSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  63308. {
  63309. return !operator==( rhs );
  63310. }
  63311. #endif
  63312. public:
  63313. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXcbSurfaceCreateInfoKHR;
  63314. const void* pNext = {};
  63315. VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR flags = {};
  63316. xcb_connection_t* connection = {};
  63317. xcb_window_t window = {};
  63318. };
  63319. static_assert( sizeof( XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
  63320. static_assert( std::is_standard_layout<XcbSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  63321. template <>
  63322. struct CppType<StructureType, StructureType::eXcbSurfaceCreateInfoKHR>
  63323. {
  63324. using Type = XcbSurfaceCreateInfoKHR;
  63325. };
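// Usage sketch (editorial, not generator output): creating a surface for an xcb window. The
// connection and window are assumed to come from the application's xcb setup.
//
//   vk::SurfaceKHR createXcbSurface( vk::Instance instance, xcb_connection_t * connection, xcb_window_t window )
//   {
//     vk::XcbSurfaceCreateInfoKHR createInfo( {}, connection, window );
//     return instance.createXcbSurfaceKHR( createInfo );
//   }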
  63326. #endif /*VK_USE_PLATFORM_XCB_KHR*/
  63327. #ifdef VK_USE_PLATFORM_XLIB_KHR
  63328. struct XlibSurfaceCreateInfoKHR
  63329. {
  63330. static const bool allowDuplicate = false;
  63331. static VULKAN_HPP_CONST_OR_CONSTEXPR StructureType structureType = StructureType::eXlibSurfaceCreateInfoKHR;
  63332. #if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  63333. VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR(VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ = {}, Display* dpy_ = {}, Window window_ = {}) VULKAN_HPP_NOEXCEPT
  63334. : flags( flags_ ), dpy( dpy_ ), window( window_ )
  63335. {}
  63336. VULKAN_HPP_CONSTEXPR XlibSurfaceCreateInfoKHR( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  63337. XlibSurfaceCreateInfoKHR( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  63338. : XlibSurfaceCreateInfoKHR( *reinterpret_cast<XlibSurfaceCreateInfoKHR const *>( &rhs ) )
  63339. {}
  63340. #endif // !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
  63341. VULKAN_HPP_CONSTEXPR_14 XlibSurfaceCreateInfoKHR & operator=( XlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT = default;
  63342. XlibSurfaceCreateInfoKHR & operator=( VkXlibSurfaceCreateInfoKHR const & rhs ) VULKAN_HPP_NOEXCEPT
  63343. {
  63344. *this = *reinterpret_cast<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR const *>( &rhs );
  63345. return *this;
  63346. }
  63347. XlibSurfaceCreateInfoKHR & setPNext( const void* pNext_ ) VULKAN_HPP_NOEXCEPT
  63348. {
  63349. pNext = pNext_;
  63350. return *this;
  63351. }
  63352. XlibSurfaceCreateInfoKHR & setFlags( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags_ ) VULKAN_HPP_NOEXCEPT
  63353. {
  63354. flags = flags_;
  63355. return *this;
  63356. }
  63357. XlibSurfaceCreateInfoKHR & setDpy( Display* dpy_ ) VULKAN_HPP_NOEXCEPT
  63358. {
  63359. dpy = dpy_;
  63360. return *this;
  63361. }
  63362. XlibSurfaceCreateInfoKHR & setWindow( Window window_ ) VULKAN_HPP_NOEXCEPT
  63363. {
  63364. window = window_;
  63365. return *this;
  63366. }
  63367. operator VkXlibSurfaceCreateInfoKHR const&() const VULKAN_HPP_NOEXCEPT
  63368. {
  63369. return *reinterpret_cast<const VkXlibSurfaceCreateInfoKHR*>( this );
  63370. }
  63371. operator VkXlibSurfaceCreateInfoKHR &() VULKAN_HPP_NOEXCEPT
  63372. {
  63373. return *reinterpret_cast<VkXlibSurfaceCreateInfoKHR*>( this );
  63374. }
  63375. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  63376. auto operator<=>( XlibSurfaceCreateInfoKHR const& ) const = default;
  63377. #else
  63378. bool operator==( XlibSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  63379. {
  63380. return ( sType == rhs.sType )
  63381. && ( pNext == rhs.pNext )
  63382. && ( flags == rhs.flags )
  63383. && ( dpy == rhs.dpy )
  63384. && ( memcmp( &window, &rhs.window, sizeof( Window ) ) == 0 );
  63385. }
  63386. bool operator!=( XlibSurfaceCreateInfoKHR const& rhs ) const VULKAN_HPP_NOEXCEPT
  63387. {
  63388. return !operator==( rhs );
  63389. }
  63390. #endif
  63391. public:
  63392. VULKAN_HPP_NAMESPACE::StructureType sType = StructureType::eXlibSurfaceCreateInfoKHR;
  63393. const void* pNext = {};
  63394. VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR flags = {};
  63395. Display* dpy = {};
  63396. Window window = {};
  63397. };
  63398. static_assert( sizeof( XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ), "struct and wrapper have different size!" );
  63399. static_assert( std::is_standard_layout<XlibSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
  63400. template <>
  63401. struct CppType<StructureType, StructureType::eXlibSurfaceCreateInfoKHR>
  63402. {
  63403. using Type = XlibSurfaceCreateInfoKHR;
  63404. };
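// Usage sketch (editorial, not generator output): creating a surface for an Xlib window. The
// Display and Window are assumed to come from the application's Xlib setup.
//
//   vk::SurfaceKHR createXlibSurface( vk::Instance instance, Display * dpy, Window window )
//   {
//     vk::XlibSurfaceCreateInfoKHR createInfo( {}, dpy, window );
//     return instance.createXlibSurfaceKHR( createInfo );
//   }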
  63405. #endif /*VK_USE_PLATFORM_XLIB_KHR*/
  63406. class DebugReportCallbackEXT
  63407. {
  63408. public:
  63409. using CType = VkDebugReportCallbackEXT;
  63410. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT;
  63411. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT;
  63412. public:
  63413. VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT() VULKAN_HPP_NOEXCEPT
  63414. : m_debugReportCallbackEXT(VK_NULL_HANDLE)
  63415. {}
  63416. VULKAN_HPP_CONSTEXPR DebugReportCallbackEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  63417. : m_debugReportCallbackEXT(VK_NULL_HANDLE)
  63418. {}
  63419. VULKAN_HPP_TYPESAFE_EXPLICIT DebugReportCallbackEXT( VkDebugReportCallbackEXT debugReportCallbackEXT ) VULKAN_HPP_NOEXCEPT
  63420. : m_debugReportCallbackEXT( debugReportCallbackEXT )
  63421. {}
  63422. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  63423. DebugReportCallbackEXT & operator=(VkDebugReportCallbackEXT debugReportCallbackEXT) VULKAN_HPP_NOEXCEPT
  63424. {
  63425. m_debugReportCallbackEXT = debugReportCallbackEXT;
  63426. return *this;
  63427. }
  63428. #endif
  63429. DebugReportCallbackEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  63430. {
  63431. m_debugReportCallbackEXT = VK_NULL_HANDLE;
  63432. return *this;
  63433. }
  63434. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  63435. auto operator<=>( DebugReportCallbackEXT const& ) const = default;
  63436. #else
  63437. bool operator==( DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
  63438. {
  63439. return m_debugReportCallbackEXT == rhs.m_debugReportCallbackEXT;
  63440. }
  63441. bool operator!=(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
  63442. {
  63443. return m_debugReportCallbackEXT != rhs.m_debugReportCallbackEXT;
  63444. }
  63445. bool operator<(DebugReportCallbackEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
  63446. {
  63447. return m_debugReportCallbackEXT < rhs.m_debugReportCallbackEXT;
  63448. }
  63449. #endif
  63450. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugReportCallbackEXT() const VULKAN_HPP_NOEXCEPT
  63451. {
  63452. return m_debugReportCallbackEXT;
  63453. }
  63454. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  63455. {
  63456. return m_debugReportCallbackEXT != VK_NULL_HANDLE;
  63457. }
  63458. bool operator!() const VULKAN_HPP_NOEXCEPT
  63459. {
  63460. return m_debugReportCallbackEXT == VK_NULL_HANDLE;
  63461. }
  63462. private:
  63463. VkDebugReportCallbackEXT m_debugReportCallbackEXT;
  63464. };
  63465. static_assert( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ), "handle and wrapper have different size!" );
  63466. template <>
  63467. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDebugReportCallbackEXT>
  63468. {
  63469. using type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
  63470. };
  63471. template <>
  63472. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDebugReportCallbackEXT>
  63473. {
  63474. using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
  63475. };
  63476. template <>
  63477. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eDebugReportCallbackEXT>
  63478. {
  63479. using Type = VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
  63480. };
  63481. template <>
  63482. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>
  63483. {
  63484. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  63485. };
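// Note with a minimal sketch (editorial): DebugReportCallbackEXT is a non-owning handle wrapper.
// A default-constructed value holds VK_NULL_HANDLE and the explicit bool conversion tests exactly
// that, so validity checks read naturally; destruction remains the creating Instance's job.
//
//   vk::DebugReportCallbackEXT callback;    // holds VK_NULL_HANDLE until assigned
//   if ( !callback )
//   {
//     // not created yet
//   }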
  63486. class DebugUtilsMessengerEXT
  63487. {
  63488. public:
  63489. using CType = VkDebugUtilsMessengerEXT;
  63490. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT;
  63491. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eUnknown;
  63492. public:
  63493. VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT() VULKAN_HPP_NOEXCEPT
  63494. : m_debugUtilsMessengerEXT(VK_NULL_HANDLE)
  63495. {}
  63496. VULKAN_HPP_CONSTEXPR DebugUtilsMessengerEXT( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  63497. : m_debugUtilsMessengerEXT(VK_NULL_HANDLE)
  63498. {}
  63499. VULKAN_HPP_TYPESAFE_EXPLICIT DebugUtilsMessengerEXT( VkDebugUtilsMessengerEXT debugUtilsMessengerEXT ) VULKAN_HPP_NOEXCEPT
  63500. : m_debugUtilsMessengerEXT( debugUtilsMessengerEXT )
  63501. {}
  63502. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  63503. DebugUtilsMessengerEXT & operator=(VkDebugUtilsMessengerEXT debugUtilsMessengerEXT) VULKAN_HPP_NOEXCEPT
  63504. {
  63505. m_debugUtilsMessengerEXT = debugUtilsMessengerEXT;
  63506. return *this;
  63507. }
  63508. #endif
  63509. DebugUtilsMessengerEXT & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  63510. {
  63511. m_debugUtilsMessengerEXT = VK_NULL_HANDLE;
  63512. return *this;
  63513. }
  63514. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  63515. auto operator<=>( DebugUtilsMessengerEXT const& ) const = default;
  63516. #else
  63517. bool operator==( DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
  63518. {
  63519. return m_debugUtilsMessengerEXT == rhs.m_debugUtilsMessengerEXT;
  63520. }
  63521. bool operator!=(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
  63522. {
  63523. return m_debugUtilsMessengerEXT != rhs.m_debugUtilsMessengerEXT;
  63524. }
  63525. bool operator<(DebugUtilsMessengerEXT const & rhs ) const VULKAN_HPP_NOEXCEPT
  63526. {
  63527. return m_debugUtilsMessengerEXT < rhs.m_debugUtilsMessengerEXT;
  63528. }
  63529. #endif
  63530. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkDebugUtilsMessengerEXT() const VULKAN_HPP_NOEXCEPT
  63531. {
  63532. return m_debugUtilsMessengerEXT;
  63533. }
  63534. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  63535. {
  63536. return m_debugUtilsMessengerEXT != VK_NULL_HANDLE;
  63537. }
  63538. bool operator!() const VULKAN_HPP_NOEXCEPT
  63539. {
  63540. return m_debugUtilsMessengerEXT == VK_NULL_HANDLE;
  63541. }
  63542. private:
  63543. VkDebugUtilsMessengerEXT m_debugUtilsMessengerEXT;
  63544. };
  63545. static_assert( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ), "handle and wrapper have different size!" );
  63546. template <>
  63547. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eDebugUtilsMessengerEXT>
  63548. {
  63549. using type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT;
  63550. };
  63551. template <>
  63552. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eDebugUtilsMessengerEXT>
  63553. {
  63554. using Type = VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT;
  63555. };
  63556. template <>
  63557. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>
  63558. {
  63559. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  63560. };
  63561. #ifndef VULKAN_HPP_NO_SMART_HANDLE
  63562. class Instance;
  63563. template <typename Dispatch> class UniqueHandleTraits<DebugReportCallbackEXT, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
  63564. using UniqueDebugReportCallbackEXT = UniqueHandle<DebugReportCallbackEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  63565. template <typename Dispatch> class UniqueHandleTraits<DebugUtilsMessengerEXT, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
  63566. using UniqueDebugUtilsMessengerEXT = UniqueHandle<DebugUtilsMessengerEXT, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  63567. template <typename Dispatch> class UniqueHandleTraits<SurfaceKHR, Dispatch> { public: using deleter = ObjectDestroy<Instance, Dispatch>; };
  63568. using UniqueSurfaceKHR = UniqueHandle<SurfaceKHR, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  63569. #endif /*VULKAN_HPP_NO_SMART_HANDLE*/
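// Usage sketch (editorial, not generator output): the UniqueHandleTraits specializations above
// give instance-level handles an ObjectDestroy<Instance> deleter, so the *Unique creation
// functions return RAII wrappers. createInfo is assumed to be a fully populated
// DebugUtilsMessengerCreateInfoEXT and instance a valid vk::Instance.
//
//   vk::UniqueDebugUtilsMessengerEXT messenger = instance.createDebugUtilsMessengerEXTUnique( createInfo );
//   // messenger.get() yields the plain vk::DebugUtilsMessengerEXT; the messenger is destroyed
//   // through the creating instance when the unique handle goes out of scope.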
  63570. class Instance
  63571. {
  63572. public:
  63573. using CType = VkInstance;
  63574. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::ObjectType objectType = VULKAN_HPP_NAMESPACE::ObjectType::eInstance;
  63575. static VULKAN_HPP_CONST_OR_CONSTEXPR VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT debugReportObjectType = VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance;
  63576. public:
  63577. VULKAN_HPP_CONSTEXPR Instance() VULKAN_HPP_NOEXCEPT
  63578. : m_instance(VK_NULL_HANDLE)
  63579. {}
  63580. VULKAN_HPP_CONSTEXPR Instance( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  63581. : m_instance(VK_NULL_HANDLE)
  63582. {}
  63583. VULKAN_HPP_TYPESAFE_EXPLICIT Instance( VkInstance instance ) VULKAN_HPP_NOEXCEPT
  63584. : m_instance( instance )
  63585. {}
  63586. #if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
  63587. Instance & operator=(VkInstance instance) VULKAN_HPP_NOEXCEPT
  63588. {
  63589. m_instance = instance;
  63590. return *this;
  63591. }
  63592. #endif
  63593. Instance & operator=( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
  63594. {
  63595. m_instance = VK_NULL_HANDLE;
  63596. return *this;
  63597. }
  63598. #if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
  63599. auto operator<=>( Instance const& ) const = default;
  63600. #else
  63601. bool operator==( Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
  63602. {
  63603. return m_instance == rhs.m_instance;
  63604. }
  63605. bool operator!=(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
  63606. {
  63607. return m_instance != rhs.m_instance;
  63608. }
  63609. bool operator<(Instance const & rhs ) const VULKAN_HPP_NOEXCEPT
  63610. {
  63611. return m_instance < rhs.m_instance;
  63612. }
  63613. #endif
  63614. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  63615. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63616. VULKAN_HPP_NODISCARD Result createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63617. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63618. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63619. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63620. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  63621. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63622. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63623. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  63624. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63625. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  63626. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63627. VULKAN_HPP_NODISCARD Result createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT* pCallback, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63628. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63629. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63630. typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63631. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  63632. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63633. VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63634. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  63635. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63636. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63637. VULKAN_HPP_NODISCARD Result createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT* pMessenger, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63638. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63639. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63640. typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63641. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  63642. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63643. VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63644. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  63645. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
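// Editor's note (illustrative, not part of the generated header): each create
// function on Instance is declared in up to three flavors: a raw pointer
// overload returning vk::Result, an enhanced-mode overload returning the
// created handle (throwing vk::SystemError on failure unless exceptions are
// disabled), and a *Unique overload returning an RAII UniqueHandle. A minimal
// sketch, assuming a valid vk::Instance `instance` and a
// vk::DebugUtilsMessengerCreateInfoEXT `messengerCreateInfo`:
//
//   // raw flavor: the caller checks the vk::Result explicitly
//   vk::DebugUtilsMessengerEXT rawMessenger;
//   vk::Result result = instance.createDebugUtilsMessengerEXT( &messengerCreateInfo, nullptr, &rawMessenger );
//
//   // enhanced flavor: returns the handle directly
//   vk::DebugUtilsMessengerEXT messenger = instance.createDebugUtilsMessengerEXT( messengerCreateInfo );
//
//   // unique flavor: destroys the messenger automatically when it leaves scope
//   vk::UniqueDebugUtilsMessengerEXT uniqueMessenger = instance.createDebugUtilsMessengerEXTUnique( messengerCreateInfo );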
  63646. #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
  63647. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63648. VULKAN_HPP_NODISCARD Result createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63649. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63650. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63651. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63652. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  63653. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63654. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createDirectFBSurfaceEXTUnique( const DirectFBSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63655. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  63656. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63657. #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
  63658. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63659. VULKAN_HPP_NODISCARD Result createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63660. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63661. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63662. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63663. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  63664. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63665. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63666. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  63667. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63668. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63669. VULKAN_HPP_NODISCARD Result createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63670. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63671. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63672. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63673. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  63674. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63675. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63676. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  63677. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63678. #ifdef VK_USE_PLATFORM_IOS_MVK
  63679. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63680. VULKAN_HPP_NODISCARD Result createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63681. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63682. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63683. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63684. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  63685. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63686. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63687. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  63688. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63689. #endif /*VK_USE_PLATFORM_IOS_MVK*/
  63690. #ifdef VK_USE_PLATFORM_FUCHSIA
  63691. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63692. VULKAN_HPP_NODISCARD Result createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63693. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63694. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63695. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63696. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  63697. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63698. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63699. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  63700. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63701. #endif /*VK_USE_PLATFORM_FUCHSIA*/
  63702. #ifdef VK_USE_PLATFORM_MACOS_MVK
  63703. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63704. VULKAN_HPP_NODISCARD Result createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63705. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63706. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63707. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63708. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  63709. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63710. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63711. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  63712. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63713. #endif /*VK_USE_PLATFORM_MACOS_MVK*/
  63714. #ifdef VK_USE_PLATFORM_METAL_EXT
  63715. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63716. VULKAN_HPP_NODISCARD Result createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63717. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63718. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63719. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63720. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  63721. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63722. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63723. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  63724. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63725. #endif /*VK_USE_PLATFORM_METAL_EXT*/
  63726. #ifdef VK_USE_PLATFORM_GGP
  63727. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63728. VULKAN_HPP_NODISCARD Result createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63729. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63730. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63731. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63732. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  63733. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63734. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63735. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  63736. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63737. #endif /*VK_USE_PLATFORM_GGP*/
  63738. #ifdef VK_USE_PLATFORM_VI_NN
  63739. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63740. VULKAN_HPP_NODISCARD Result createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63741. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63742. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63743. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63744. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  63745. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63746. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63747. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  63748. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63749. #endif /*VK_USE_PLATFORM_VI_NN*/
  63750. #ifdef VK_USE_PLATFORM_WAYLAND_KHR
  63751. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63752. VULKAN_HPP_NODISCARD Result createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63753. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63754. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63755. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63756. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  63757. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63758. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63759. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  63760. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63761. #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
  63762. #ifdef VK_USE_PLATFORM_WIN32_KHR
  63763. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63764. VULKAN_HPP_NODISCARD Result createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63765. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63766. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63767. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63768. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  63769. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63770. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63771. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  63772. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63773. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  63774. #ifdef VK_USE_PLATFORM_XCB_KHR
  63775. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63776. VULKAN_HPP_NODISCARD Result createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63777. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63778. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63779. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63780. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  63781. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63782. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63783. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  63784. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63785. #endif /*VK_USE_PLATFORM_XCB_KHR*/
  63786. #ifdef VK_USE_PLATFORM_XLIB_KHR
  63787. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63788. VULKAN_HPP_NODISCARD Result createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63789. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63790. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63791. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63792. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  63793. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63794. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63795. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  63796. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63797. #endif /*VK_USE_PLATFORM_XLIB_KHR*/
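// Editor's note (illustrative, not part of the generated header): the surface
// creation members above are only compiled when the matching VK_USE_PLATFORM_*
// macro is defined before this header is included. A minimal sketch for a
// Win32 build, assuming an HINSTANCE `hinstance` and an HWND `hwnd` are
// already available:
//
//   #ifdef VK_USE_PLATFORM_WIN32_KHR
//     vk::Win32SurfaceCreateInfoKHR surfaceCreateInfo( {}, hinstance, hwnd );
//     vk::UniqueSurfaceKHR surface = instance.createWin32SurfaceKHRUnique( surfaceCreateInfo );
//   #endif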
  63798. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63799. void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63800. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63801. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63802. void debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63803. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63804. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63805. void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63806. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63807. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63808. void destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63809. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63810. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63811. void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63812. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63813. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63814. void destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63815. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63816. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63817. void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63818. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63819. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63820. void destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63821. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63822. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63823. void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63824. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63825. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63826. void destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63827. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63828. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63829. void destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63830. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63831. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63832. void destroy( Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63833. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63834. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63835. void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63836. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63837. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63838. void destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63839. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63840. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63841. void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63842. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63843. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63844. void destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63845. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63846. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63847. VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63848. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63849. template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63850. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroups( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63851. template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PhysicalDeviceGroupPropertiesAllocator, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type = 0>
  63852. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63853. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63854. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63855. VULKAN_HPP_NODISCARD Result enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63856. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63857. template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63858. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroupsKHR( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63859. template <typename PhysicalDeviceGroupPropertiesAllocator = std::allocator<PhysicalDeviceGroupProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PhysicalDeviceGroupPropertiesAllocator, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type = 0>
  63860. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63861. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63862. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63863. VULKAN_HPP_NODISCARD Result enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63864. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63865. template <typename PhysicalDeviceAllocator = std::allocator<PhysicalDevice>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63866. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type enumeratePhysicalDevices( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63867. template <typename PhysicalDeviceAllocator = std::allocator<PhysicalDevice>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = PhysicalDeviceAllocator, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDevice>::value, int>::type = 0>
  63868. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const;
  63869. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
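// Editor's note (illustrative, not part of the generated header): the
// enhanced-mode enumeration overloads return a std::vector directly. A minimal
// sketch, assuming a valid vk::Instance `instance` and enabled exceptions:
//
//   std::vector<vk::PhysicalDevice> gpus = instance.enumeratePhysicalDevices();
//   for ( vk::PhysicalDevice const & gpu : gpus )
//   {
//     vk::PhysicalDeviceProperties props = gpu.getProperties();
//     // select a device, e.g. by props.deviceType or by queue family support
//   }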
  63870. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63871. PFN_vkVoidFunction getProcAddr( const char* pName, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63872. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63873. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63874. PFN_vkVoidFunction getProcAddr( const std::string & name, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63875. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63876. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63877. void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63878. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63879. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63880. void submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) const VULKAN_HPP_NOEXCEPT;
  63881. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63882. VULKAN_HPP_TYPESAFE_EXPLICIT operator VkInstance() const VULKAN_HPP_NOEXCEPT
  63883. {
  63884. return m_instance;
  63885. }
  63886. explicit operator bool() const VULKAN_HPP_NOEXCEPT
  63887. {
  63888. return m_instance != VK_NULL_HANDLE;
  63889. }
  63890. bool operator!() const VULKAN_HPP_NOEXCEPT
  63891. {
  63892. return m_instance == VK_NULL_HANDLE;
  63893. }
  63894. private:
  63895. VkInstance m_instance;
  63896. };
  63897. static_assert( sizeof( VULKAN_HPP_NAMESPACE::Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
  63898. template <>
  63899. struct VULKAN_HPP_DEPRECATED("vk::cpp_type is deprecated. Use vk::CppType instead.") cpp_type<ObjectType::eInstance>
  63900. {
  63901. using type = VULKAN_HPP_NAMESPACE::Instance;
  63902. };
  63903. template <>
  63904. struct CppType<VULKAN_HPP_NAMESPACE::ObjectType, VULKAN_HPP_NAMESPACE::ObjectType::eInstance>
  63905. {
  63906. using Type = VULKAN_HPP_NAMESPACE::Instance;
  63907. };
  63908. template <>
  63909. struct CppType<VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT::eInstance>
  63910. {
  63911. using Type = VULKAN_HPP_NAMESPACE::Instance;
  63912. };
  63913. template <>
  63914. struct isVulkanHandleType<VULKAN_HPP_NAMESPACE::Instance>
  63915. {
  63916. static VULKAN_HPP_CONST_OR_CONSTEXPR bool value = true;
  63917. };
  63918. #ifndef VULKAN_HPP_NO_SMART_HANDLE
  63919. template <typename Dispatch> class UniqueHandleTraits<Instance, Dispatch> { public: using deleter = ObjectDestroy<NoParent, Dispatch>; };
  63920. using UniqueInstance = UniqueHandle<Instance, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>;
  63921. #endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  63922. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63923. VULKAN_HPP_NODISCARD Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
  63924. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63925. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63926. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
  63927. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  63928. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63929. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
  63930. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  63931. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63932. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63933. VULKAN_HPP_NODISCARD Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
  63934. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63935. template <typename ExtensionPropertiesAllocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63936. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
  63937. template <typename ExtensionPropertiesAllocator = std::allocator<ExtensionProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = ExtensionPropertiesAllocator, typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type = 0>
  63938. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, ExtensionPropertiesAllocator & extensionPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
  63939. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63940. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63941. VULKAN_HPP_NODISCARD Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
  63942. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63943. template <typename LayerPropertiesAllocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63944. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
  63945. template <typename LayerPropertiesAllocator = std::allocator<LayerProperties>, typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE, typename B = LayerPropertiesAllocator, typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type = 0>
  63946. VULKAN_HPP_NODISCARD typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
  63947. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63948. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63949. VULKAN_HPP_NODISCARD Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT;
  63950. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63951. template <typename Dispatch = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
  63952. typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT );
  63953. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  63954. template <typename Dispatch>
  63955. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Instance* pInstance, Dispatch const & d ) VULKAN_HPP_NOEXCEPT
  63956. {
  63957. return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkInstance *>( pInstance ) ) );
  63958. }
  63959. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63960. template <typename Dispatch>
  63961. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d )
  63962. {
  63963. VULKAN_HPP_NAMESPACE::Instance instance;
  63964. Result result = static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkInstance *>( &instance ) ) );
  63965. return createResultValue( result, instance, VULKAN_HPP_NAMESPACE_STRING "::createInstance" );
  63966. }
  63967. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  63968. template <typename Dispatch>
  63969. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type createInstanceUnique( const InstanceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d )
  63970. {
  63971. VULKAN_HPP_NAMESPACE::Instance instance;
  63972. Result result = static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkInstance *>( &instance ) ) );
  63973. ObjectDestroy<NoParent, Dispatch> deleter( allocator, d );
  63974. return createResultValue<VULKAN_HPP_NAMESPACE::Instance, Dispatch>( result, instance, VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique", deleter );
  63975. }
  63976. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  63977. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
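// Editor's note (illustrative, not part of the generated header): a minimal
// sketch of the free createInstance helpers defined above. The application and
// engine names are placeholders, and exceptions are assumed to be enabled:
//
//   vk::ApplicationInfo appInfo( "ExampleApp", 1, "ExampleEngine", 1, VK_API_VERSION_1_2 );
//   vk::InstanceCreateInfo instanceCreateInfo( {}, &appInfo );
//   vk::UniqueInstance instance = vk::createInstanceUnique( instanceCreateInfo );
//   // when the dynamic dispatch loader is used (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC),
//   // VULKAN_HPP_DEFAULT_DISPATCHER.init( *instance ) is typically called here so
//   // that instance-level entry points are resolved before further calls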
  63978. template <typename Dispatch>
  63979. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const & d ) VULKAN_HPP_NOEXCEPT
  63980. {
  63981. return static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast< VkExtensionProperties *>( pProperties ) ) );
  63982. }
  63983. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  63984. template <typename ExtensionPropertiesAllocator, typename Dispatch>
  63985. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d )
  63986. {
  63987. std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties;
  63988. uint32_t propertyCount;
  63989. Result result;
  63990. do
  63991. {
  63992. result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
  63993. if ( ( result == Result::eSuccess ) && propertyCount )
  63994. {
  63995. properties.resize( propertyCount );
  63996. result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
  63997. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  63998. }
  63999. } while ( result == Result::eIncomplete );
  64000. if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
  64001. {
  64002. properties.resize( propertyCount );
  64003. }
64004. return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
  64005. }
  64006. template <typename ExtensionPropertiesAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type >
  64007. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, ExtensionPropertiesAllocator & extensionPropertiesAllocator, Dispatch const & d )
  64008. {
  64009. std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
  64010. uint32_t propertyCount;
  64011. Result result;
  64012. do
  64013. {
  64014. result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
  64015. if ( ( result == Result::eSuccess ) && propertyCount )
  64016. {
  64017. properties.resize( propertyCount );
  64018. result = static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
  64019. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  64020. }
  64021. } while ( result == Result::eIncomplete );
  64022. if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
  64023. {
  64024. properties.resize( propertyCount );
  64025. }
64026. return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
  64027. }
  64028. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
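// Editor's note (illustrative, not part of the generated header): the loops
// above implement the usual Vulkan two-call idiom, querying the count first
// and retrying while the result is eIncomplete, because the property set can
// change between the two calls. A minimal usage sketch, assuming <cstring> is
// available and exceptions are enabled:
//
//   bool hasDebugUtils = false;
//   for ( vk::ExtensionProperties const & ep : vk::enumerateInstanceExtensionProperties() )
//   {
//     hasDebugUtils |= ( strcmp( ep.extensionName.data(), VK_EXT_DEBUG_UTILS_EXTENSION_NAME ) == 0 );
//   }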
  64029. template <typename Dispatch>
  64030. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const & d ) VULKAN_HPP_NOEXCEPT
  64031. {
  64032. return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast< VkLayerProperties *>( pProperties ) ) );
  64033. }
  64034. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64035. template <typename LayerPropertiesAllocator, typename Dispatch>
  64036. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties( Dispatch const & d )
  64037. {
  64038. std::vector<LayerProperties, LayerPropertiesAllocator> properties;
  64039. uint32_t propertyCount;
  64040. Result result;
  64041. do
  64042. {
  64043. result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
  64044. if ( ( result == Result::eSuccess ) && propertyCount )
  64045. {
  64046. properties.resize( propertyCount );
  64047. result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
  64048. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  64049. }
  64050. } while ( result == Result::eIncomplete );
  64051. if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
  64052. {
  64053. properties.resize( propertyCount );
  64054. }
64055. return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
  64056. }
  64057. template <typename LayerPropertiesAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type >
  64058. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d )
  64059. {
  64060. std::vector<LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
  64061. uint32_t propertyCount;
  64062. Result result;
  64063. do
  64064. {
  64065. result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ) );
  64066. if ( ( result == Result::eSuccess ) && propertyCount )
  64067. {
  64068. properties.resize( propertyCount );
  64069. result = static_cast<Result>( d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
  64070. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  64071. }
  64072. } while ( result == Result::eIncomplete );
  64073. if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
  64074. {
  64075. properties.resize( propertyCount );
  64076. }
64077. return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
  64078. }
  64079. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
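// Editor's note (illustrative, not part of the generated header): a minimal
// sketch checking for the Khronos validation layer with the enhanced overload
// above, assuming <cstring> is available and exceptions are enabled:
//
//   bool hasValidation = false;
//   for ( vk::LayerProperties const & lp : vk::enumerateInstanceLayerProperties() )
//   {
//     hasValidation |= ( strcmp( lp.layerName.data(), "VK_LAYER_KHRONOS_validation" ) == 0 );
//   }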
  64080. template <typename Dispatch>
  64081. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t* pApiVersion, Dispatch const & d ) VULKAN_HPP_NOEXCEPT
  64082. {
  64083. return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
  64084. }
  64085. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64086. template <typename Dispatch>
  64087. VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d )
  64088. {
  64089. uint32_t apiVersion;
  64090. Result result = static_cast<Result>( d.vkEnumerateInstanceVersion( &apiVersion ) );
  64091. return createResultValue( result, apiVersion, VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" );
  64092. }
  64093. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
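// Editor's note (illustrative, not part of the generated header):
// vkEnumerateInstanceVersion only exists on Vulkan 1.1+ loaders, so with the
// dynamic dispatcher d.vkEnumerateInstanceVersion may be null on a 1.0 loader
// and the instance version is then effectively VK_API_VERSION_1_0. A minimal
// sketch of decoding the packed version value:
//
//   uint32_t apiVersion = vk::enumerateInstanceVersion();
//   uint32_t major = VK_VERSION_MAJOR( apiVersion );
//   uint32_t minor = VK_VERSION_MINOR( apiVersion );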
  64094. template <typename Dispatch>
  64095. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo* pBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64096. {
  64097. return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
  64098. }
  64099. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64100. template <typename Dispatch>
  64101. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::begin( const CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const
  64102. {
  64103. Result result = static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) ) );
  64104. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
  64105. }
  64106. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
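// Editor's note (illustrative, not part of the generated header): a minimal
// sketch of recording with the enhanced-mode begin() overload above, assuming
// an allocated vk::CommandBuffer `commandBuffer` and enabled exceptions:
//
//   vk::CommandBufferBeginInfo beginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit );
//   commandBuffer.begin( beginInfo );
//   // ... record commands ...
//   commandBuffer.end();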
  64107. template <typename Dispatch>
  64108. VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64109. {
  64110. d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) );
  64111. }
  64112. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64113. template <typename Dispatch>
  64114. VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64115. {
  64116. d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) );
  64117. }
  64118. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64119. template <typename Dispatch>
  64120. VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64121. {
  64122. d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  64123. }
  64124. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64125. template <typename Dispatch>
  64126. VULKAN_HPP_INLINE void CommandBuffer::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64127. {
  64128. d.vkCmdBeginDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  64129. }
  64130. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64131. template <typename Dispatch>
  64132. VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64133. {
  64134. d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
  64135. }
  64136. template <typename Dispatch>
  64137. VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64138. {
  64139. d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index );
  64140. }
  64141. template <typename Dispatch>
  64142. VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64143. {
  64144. d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
  64145. }
  64146. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64147. template <typename Dispatch>
  64148. VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64149. {
  64150. d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
  64151. }
  64152. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64153. template <typename Dispatch>
  64154. VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64155. {
  64156. d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
  64157. }
  64158. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64159. template <typename Dispatch>
  64160. VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64161. {
  64162. d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
  64163. }
  64164. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64165. template <typename Dispatch>
  64166. VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo* pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64167. {
  64168. d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
  64169. }
  64170. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64171. template <typename Dispatch>
  64172. VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const RenderPassBeginInfo & renderPassBegin, const SubpassBeginInfo & subpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64173. {
  64174. d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
  64175. }
  64176. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64177. template <typename Dispatch>
  64178. VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64179. {
  64180. d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer *>( pCounterBuffers ), reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
  64181. }
  64182. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64183. template <typename Dispatch>
  64184. VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  64185. {
  64186. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  64187. VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
  64188. #else
  64189. if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
  64190. {
  64191. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
  64192. }
  64193. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  64194. d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size(), reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
  64195. }
  64196. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64197. template <typename Dispatch>
  64198. VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64199. {
  64200. d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets );
  64201. }
  64202. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64203. template <typename Dispatch>
  64204. VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, ArrayProxy<const uint32_t> const & dynamicOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64205. {
  64206. d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ), dynamicOffsets.size(), dynamicOffsets.data() );
  64207. }
  64208. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64209. template <typename Dispatch>
  64210. VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64211. {
  64212. d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) );
  64213. }
  64214. template <typename Dispatch>
  64215. VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64216. {
  64217. d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
  64218. }
  64219. template <typename Dispatch>
  64220. VULKAN_HPP_INLINE void CommandBuffer::bindPipelineShaderGroupNV( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t groupIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64221. {
  64222. d.vkCmdBindPipelineShaderGroupNV( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ), groupIndex );
  64223. }
  64224. template <typename Dispatch>
  64225. VULKAN_HPP_INLINE void CommandBuffer::bindShadingRateImageNV( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64226. {
  64227. d.vkCmdBindShadingRateImageNV( m_commandBuffer, static_cast<VkImageView>( imageView ), static_cast<VkImageLayout>( imageLayout ) );
  64228. }
  64229. template <typename Dispatch>
  64230. VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64231. {
  64232. d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ), reinterpret_cast<const VkDeviceSize *>( pSizes ) );
  64233. }
  64234. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64235. template <typename Dispatch>
  64236. VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  64237. {
  64238. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  64239. VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
  64240. VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
  64241. #else
  64242. if ( buffers.size() != offsets.size() )
  64243. {
  64244. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" );
  64245. }
  64246. if ( !sizes.empty() && buffers.size() != sizes.size() )
  64247. {
  64248. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" );
  64249. }
  64250. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  64251. d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ), reinterpret_cast<const VkDeviceSize *>( sizes.data() ) );
  64252. }
  64253. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64254. template <typename Dispatch>
  64255. VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64256. {
  64257. d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
  64258. }
  64259. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64260. template <typename Dispatch>
  64261. VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  64262. {
  64263. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  64264. VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
  64265. #else
  64266. if ( buffers.size() != offsets.size() )
  64267. {
  64268. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
  64269. }
  64270. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  64271. d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
  64272. }
  64273. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64274. template <typename Dispatch>
  64275. VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer* pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize* pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize* pStrides, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64276. {
  64277. d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ), reinterpret_cast<const VkDeviceSize *>( pSizes ), reinterpret_cast<const VkDeviceSize *>( pStrides ) );
  64278. }
  64279. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64280. template <typename Dispatch>
  64281. VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2EXT( uint32_t firstBinding, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  64282. {
  64283. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  64284. VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
  64285. VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() );
  64286. VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() );
  64287. #else
  64288. if ( buffers.size() != offsets.size() )
  64289. {
  64290. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != offsets.size()" );
  64291. }
  64292. if ( !sizes.empty() && buffers.size() != sizes.size() )
  64293. {
  64294. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != sizes.size()" );
  64295. }
  64296. if ( !strides.empty() && buffers.size() != strides.size() )
  64297. {
  64298. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2EXT: buffers.size() != strides.size()" );
  64299. }
  64300. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  64301. d.vkCmdBindVertexBuffers2EXT( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ), reinterpret_cast<const VkDeviceSize *>( sizes.data() ), reinterpret_cast<const VkDeviceSize *>( strides.data() ) );
  64302. }
  64303. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64304. template <typename Dispatch>
  64305. VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit* pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64306. {
  64307. d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit *>( pRegions ), static_cast<VkFilter>( filter ) );
  64308. }
  64309. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64310. template <typename Dispatch>
  64311. VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64312. {
  64313. d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageBlit *>( regions.data() ), static_cast<VkFilter>( filter ) );
  64314. }
  64315. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64316. template <typename Dispatch>
  64317. VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR* pBlitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64318. {
  64319. d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2KHR *>( pBlitImageInfo ) );
  64320. }
  64321. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64322. template <typename Dispatch>
  64323. VULKAN_HPP_INLINE void CommandBuffer::blitImage2KHR( const BlitImageInfo2KHR & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64324. {
  64325. d.vkCmdBlitImage2KHR( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2KHR *>( &blitImageInfo ) );
  64326. }
  64327. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64328. template <typename Dispatch>
  64329. VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV* pInfo, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64330. {
  64331. d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureInfoNV *>( pInfo ), static_cast<VkBuffer>( instanceData ), static_cast<VkDeviceSize>( instanceOffset ), static_cast<VkBool32>( update ), static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkBuffer>( scratch ), static_cast<VkDeviceSize>( scratchOffset ) );
  64332. }
  64333. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64334. template <typename Dispatch>
  64335. VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructureNV( const AccelerationStructureInfoNV & info, VULKAN_HPP_NAMESPACE::Buffer instanceData, VULKAN_HPP_NAMESPACE::DeviceSize instanceOffset, VULKAN_HPP_NAMESPACE::Bool32 update, VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::Buffer scratch, VULKAN_HPP_NAMESPACE::DeviceSize scratchOffset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64336. {
  64337. d.vkCmdBuildAccelerationStructureNV( m_commandBuffer, reinterpret_cast<const VkAccelerationStructureInfoNV *>( &info ), static_cast<VkBuffer>( instanceData ), static_cast<VkDeviceSize>( instanceOffset ), static_cast<VkBool32>( update ), static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkBuffer>( scratch ), static_cast<VkDeviceSize>( scratchOffset ) );
  64338. }
  64339. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64340. template <typename Dispatch>
  64341. VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::DeviceAddress* pIndirectDeviceAddresses, const uint32_t* pIndirectStrides, const uint32_t* const * ppMaxPrimitiveCounts, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64342. {
  64343. d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer, infoCount, reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), reinterpret_cast<const VkDeviceAddress *>( pIndirectDeviceAddresses ), pIndirectStrides, ppMaxPrimitiveCounts );
  64344. }
  64345. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64346. template <typename Dispatch>
  64347. VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresIndirectKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceAddress> const & indirectDeviceAddresses, ArrayProxy<const uint32_t> const & indirectStrides, ArrayProxy<const uint32_t* const > const & pMaxPrimitiveCounts, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  64348. {
  64349. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  64350. VULKAN_HPP_ASSERT( infos.size() == indirectDeviceAddresses.size() );
  64351. VULKAN_HPP_ASSERT( infos.size() == indirectStrides.size() );
  64352. VULKAN_HPP_ASSERT( infos.size() == pMaxPrimitiveCounts.size() );
  64353. #else
  64354. if ( infos.size() != indirectDeviceAddresses.size() )
  64355. {
  64356. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectDeviceAddresses.size()" );
  64357. }
  64358. if ( infos.size() != indirectStrides.size() )
  64359. {
  64360. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != indirectStrides.size()" );
  64361. }
  64362. if ( infos.size() != pMaxPrimitiveCounts.size() )
  64363. {
  64364. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresIndirectKHR: infos.size() != pMaxPrimitiveCounts.size()" );
  64365. }
  64366. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  64367. d.vkCmdBuildAccelerationStructuresIndirectKHR( m_commandBuffer, infos.size(), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), reinterpret_cast<const VkDeviceAddress *>( indirectDeviceAddresses.data() ), indirectStrides.data(), pMaxPrimitiveCounts.data() );
  64368. }
  64369. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64370. template <typename Dispatch>
  64371. VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR* const * ppBuildRangeInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64372. {
  64373. d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer, infoCount, reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) );
  64374. }
  64375. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64376. template <typename Dispatch>
  64377. VULKAN_HPP_INLINE void CommandBuffer::buildAccelerationStructuresKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR* const > const & pBuildRangeInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  64378. {
  64379. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  64380. VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
  64381. #else
  64382. if ( infos.size() != pBuildRangeInfos.size() )
  64383. {
  64384. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
  64385. }
  64386. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  64387. d.vkCmdBuildAccelerationStructuresKHR( m_commandBuffer, infos.size(), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) );
  64388. }
  64389. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64390. template <typename Dispatch>
  64391. VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment* pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect* pRects, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64392. {
  64393. d.vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast<const VkClearAttachment *>( pAttachments ), rectCount, reinterpret_cast<const VkClearRect *>( pRects ) );
  64394. }
  64395. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64396. template <typename Dispatch>
  64397. VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64398. {
  64399. d.vkCmdClearAttachments( m_commandBuffer, attachments.size(), reinterpret_cast<const VkClearAttachment *>( attachments.data() ), rects.size(), reinterpret_cast<const VkClearRect *>( rects.data() ) );
  64400. }
  64401. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64402. template <typename Dispatch>
  64403. VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue* pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64404. {
  64405. d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue *>( pColor ), rangeCount, reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
  64406. }
  64407. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64408. template <typename Dispatch>
  64409. VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearColorValue & color, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64410. {
  64411. d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue *>( &color ), ranges.size(), reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
  64412. }
  64413. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64414. template <typename Dispatch>
  64415. VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange* pRanges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64416. {
  64417. d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ), rangeCount, reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
  64418. }
  64419. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64420. template <typename Dispatch>
  64421. VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const ClearDepthStencilValue & depthStencil, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64422. {
  64423. d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ), ranges.size(), reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
  64424. }
  64425. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64426. template <typename Dispatch>
  64427. VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64428. {
  64429. d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) );
  64430. }
  64431. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64432. template <typename Dispatch>
  64433. VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureKHR( const CopyAccelerationStructureInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64434. {
  64435. d.vkCmdCopyAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) );
  64436. }
  64437. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64438. template <typename Dispatch>
  64439. VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV dst, VULKAN_HPP_NAMESPACE::AccelerationStructureNV src, VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR mode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64440. {
  64441. d.vkCmdCopyAccelerationStructureNV( m_commandBuffer, static_cast<VkAccelerationStructureNV>( dst ), static_cast<VkAccelerationStructureNV>( src ), static_cast<VkCopyAccelerationStructureModeKHR>( mode ) );
  64442. }
  64443. template <typename Dispatch>
  64444. VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64445. {
  64446. d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) );
  64447. }
  64448. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64449. template <typename Dispatch>
  64450. VULKAN_HPP_INLINE void CommandBuffer::copyAccelerationStructureToMemoryKHR( const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64451. {
  64452. d.vkCmdCopyAccelerationStructureToMemoryKHR( m_commandBuffer, reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) );
  64453. }
  64454. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64455. template <typename Dispatch>
  64456. VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy* pRegions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64457. {
  64458. d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy *>( pRegions ) );
  64459. }
  64460. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64461. template <typename Dispatch>
  64462. VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64463. {
  64464. d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size(), reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
  64465. }
  64466. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64467. template <typename Dispatch>
  64468. VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR* pCopyBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64469. {
  64470. d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR *>( pCopyBufferInfo ) );
  64471. }
  64472. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64473. template <typename Dispatch>
  64474. VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2KHR( const CopyBufferInfo2KHR & copyBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64475. {
  64476. d.vkCmdCopyBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2KHR *>( &copyBufferInfo ) );
  64477. }
  64478. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64479. template <typename Dispatch>
  64480. VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64481. {
  64482. d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
  64483. }
  64484. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64485. template <typename Dispatch>
  64486. VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64487. {
  64488. d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
  64489. }
  64490. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64491. template <typename Dispatch>
  64492. VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR* pCopyBufferToImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64493. {
  64494. d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2KHR *>( pCopyBufferToImageInfo ) );
  64495. }
  64496. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64497. template <typename Dispatch>
  64498. VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2KHR( const CopyBufferToImageInfo2KHR & copyBufferToImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64499. {
  64500. d.vkCmdCopyBufferToImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2KHR *>( &copyBufferToImageInfo ) );
  64501. }
  64502. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64503. template <typename Dispatch>
  64504. VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy* pRegions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64505. {
  64506. d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy *>( pRegions ) );
  64507. }
  64508. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64509. template <typename Dispatch>
  64510. VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64511. {
  64512. d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageCopy *>( regions.data() ) );
  64513. }
  64514. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64515. template <typename Dispatch>
  64516. VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR* pCopyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64517. {
  64518. d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR *>( pCopyImageInfo ) );
  64519. }
  64520. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64521. template <typename Dispatch>
  64522. VULKAN_HPP_INLINE void CommandBuffer::copyImage2KHR( const CopyImageInfo2KHR & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64523. {
  64524. d.vkCmdCopyImage2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2KHR *>( &copyImageInfo ) );
  64525. }
  64526. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64527. template <typename Dispatch>
  64528. VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy* pRegions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64529. {
  64530. d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
  64531. }
  64532. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64533. template <typename Dispatch>
  64534. VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64535. {
  64536. d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size(), reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
  64537. }
  64538. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64539. template <typename Dispatch>
  64540. VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR* pCopyImageToBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64541. {
  64542. d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2KHR *>( pCopyImageToBufferInfo ) );
  64543. }
  64544. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64545. template <typename Dispatch>
  64546. VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2KHR( const CopyImageToBufferInfo2KHR & copyImageToBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64547. {
  64548. d.vkCmdCopyImageToBuffer2KHR( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2KHR *>( &copyImageToBufferInfo ) );
  64549. }
  64550. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64551. template <typename Dispatch>
  64552. VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64553. {
  64554. d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) );
  64555. }
  64556. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64557. template <typename Dispatch>
  64558. VULKAN_HPP_INLINE void CommandBuffer::copyMemoryToAccelerationStructureKHR( const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64559. {
  64560. d.vkCmdCopyMemoryToAccelerationStructureKHR( m_commandBuffer, reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) );
  64561. }
  64562. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64563. template <typename Dispatch>
  64564. VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64565. {
  64566. d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
  64567. }
  64568. template <typename Dispatch>
  64569. VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64570. {
  64571. d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
  64572. }
  64573. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64574. template <typename Dispatch>
  64575. VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64576. {
  64577. d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
  64578. }
  64579. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64580. template <typename Dispatch>
  64581. VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64582. {
  64583. d.vkCmdDebugMarkerEndEXT( m_commandBuffer );
  64584. }
  64585. template <typename Dispatch>
  64586. VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT* pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64587. {
  64588. d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) );
  64589. }
  64590. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64591. template <typename Dispatch>
  64592. VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64593. {
  64594. d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) );
  64595. }
  64596. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64597. template <typename Dispatch>
  64598. VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64599. {
  64600. d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
  64601. }
  64602. template <typename Dispatch>
  64603. VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64604. {
  64605. d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
  64606. }
  64607. template <typename Dispatch>
  64608. VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64609. {
  64610. d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
  64611. }
  64612. template <typename Dispatch>
  64613. VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64614. {
  64615. d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
  64616. }
  64617. template <typename Dispatch>
  64618. VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64619. {
  64620. d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
  64621. }
  64622. template <typename Dispatch>
  64623. VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64624. {
  64625. d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
  64626. }
  64627. template <typename Dispatch>
  64628. VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64629. {
  64630. d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  64631. }
  64632. template <typename Dispatch>
  64633. VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64634. {
  64635. d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
  64636. }
  64637. template <typename Dispatch>
  64638. VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64639. {
  64640. d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
  64641. }
  64642. template <typename Dispatch>
  64643. VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64644. {
  64645. d.vkCmdDrawIndexedIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
  64646. }
  64647. template <typename Dispatch>
  64648. VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64649. {
  64650. d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  64651. }
  64652. template <typename Dispatch>
  64653. VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64654. {
  64655. d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast<VkBuffer>( counterBuffer ), static_cast<VkDeviceSize>( counterBufferOffset ), counterOffset, vertexStride );
  64656. }
  64657. template <typename Dispatch>
  64658. VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64659. {
  64660. d.vkCmdDrawIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
  64661. }
  64662. template <typename Dispatch>
  64663. VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64664. {
  64665. d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
  64666. }
  64667. template <typename Dispatch>
  64668. VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountKHR( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64669. {
  64670. d.vkCmdDrawIndirectCountKHR( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
  64671. }
  64672. template <typename Dispatch>
  64673. VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectCountNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64674. {
  64675. d.vkCmdDrawMeshTasksIndirectCountNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
  64676. }
  64677. template <typename Dispatch>
  64678. VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksIndirectNV( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64679. {
  64680. d.vkCmdDrawMeshTasksIndirectNV( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
  64681. }
  64682. template <typename Dispatch>
  64683. VULKAN_HPP_INLINE void CommandBuffer::drawMeshTasksNV( uint32_t taskCount, uint32_t firstTask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64684. {
  64685. d.vkCmdDrawMeshTasksNV( m_commandBuffer, taskCount, firstTask );
  64686. }
  64687. template <typename Dispatch>
  64688. VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64689. {
  64690. d.vkCmdEndConditionalRenderingEXT( m_commandBuffer );
  64691. }
  64692. template <typename Dispatch>
  64693. VULKAN_HPP_INLINE void CommandBuffer::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64694. {
  64695. d.vkCmdEndDebugUtilsLabelEXT( m_commandBuffer );
  64696. }
  64697. template <typename Dispatch>
  64698. VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64699. {
  64700. d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
  64701. }
  64702. template <typename Dispatch>
  64703. VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64704. {
  64705. d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index );
  64706. }
  64707. template <typename Dispatch>
  64708. VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64709. {
  64710. d.vkCmdEndRenderPass( m_commandBuffer );
  64711. }
  64712. template <typename Dispatch>
  64713. VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64714. {
  64715. d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  64716. }
  64717. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64718. template <typename Dispatch>
  64719. VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64720. {
  64721. d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  64722. }
  64723. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64724. template <typename Dispatch>
  64725. VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64726. {
  64727. d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  64728. }
  64729. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64730. template <typename Dispatch>
  64731. VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64732. {
  64733. d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  64734. }
  64735. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64736. template <typename Dispatch>
  64737. VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer* pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize* pCounterBufferOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64738. {
  64739. d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer *>( pCounterBuffers ), reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) );
  64740. }
  64741. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64742. template <typename Dispatch>
  64743. VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  64744. {
  64745. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  64746. VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() );
  64747. #else
  64748. if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() )
  64749. {
  64750. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" );
  64751. }
  64752. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  64753. d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size(), reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) );
  64754. }
  64755. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64756. template <typename Dispatch>
  64757. VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64758. {
  64759. d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  64760. }
  64761. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64762. template <typename Dispatch>
  64763. VULKAN_HPP_INLINE void CommandBuffer::executeCommands( ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64764. {
  64765. d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  64766. }
  64767. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64768. template <typename Dispatch>
  64769. VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64770. {
  64771. d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
  64772. }
  64773. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64774. template <typename Dispatch>
  64775. VULKAN_HPP_INLINE void CommandBuffer::executeGeneratedCommandsNV( VULKAN_HPP_NAMESPACE::Bool32 isPreprocessed, const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64776. {
  64777. d.vkCmdExecuteGeneratedCommandsNV( m_commandBuffer, static_cast<VkBool32>( isPreprocessed ), reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
  64778. }
  64779. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64780. template <typename Dispatch>
  64781. VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64782. {
  64783. d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
  64784. }
  64785. template <typename Dispatch>
  64786. VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64787. {
  64788. d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  64789. }
  64790. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64791. template <typename Dispatch>
  64792. VULKAN_HPP_INLINE void CommandBuffer::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64793. {
  64794. d.vkCmdInsertDebugUtilsLabelEXT( m_commandBuffer, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  64795. }
  64796. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64797. template <typename Dispatch>
  64798. VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64799. {
  64800. d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
  64801. }
  64802. template <typename Dispatch>
  64803. VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64804. {
  64805. d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  64806. }
  64807. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64808. template <typename Dispatch>
  64809. VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64810. {
  64811. d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  64812. }
  64813. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64814. template <typename Dispatch>
  64815. VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo* pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo* pSubpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64816. {
  64817. d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
  64818. }
  64819. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64820. template <typename Dispatch>
  64821. VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const SubpassBeginInfo & subpassBeginInfo, const SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64822. {
  64823. d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
  64824. }
  64825. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64826. template <typename Dispatch>
  64827. VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64828. {
  64829. d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
  64830. }
  64831. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64832. template <typename Dispatch>
  64833. VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64834. {
  64835. d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size(), reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), bufferMemoryBarriers.size(), reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size(), reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
  64836. }
  64837. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
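// Illustrative usage sketch (not part of the generated header): recording an image layout
// transition through the ArrayProxy overload above. Empty proxies (nullptr) are passed for the
// global and buffer barriers; `cmd` and `image` are assumed to exist.
//
//   vk::ImageMemoryBarrier barrier = vk::ImageMemoryBarrier()
//                                      .setSrcAccessMask( {} )
//                                      .setDstAccessMask( vk::AccessFlagBits::eTransferWrite )
//                                      .setOldLayout( vk::ImageLayout::eUndefined )
//                                      .setNewLayout( vk::ImageLayout::eTransferDstOptimal )
//                                      .setSrcQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
//                                      .setDstQueueFamilyIndex( VK_QUEUE_FAMILY_IGNORED )
//                                      .setImage( image )
//                                      .setSubresourceRange( { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 } );
//   cmd.pipelineBarrier( vk::PipelineStageFlagBits::eTopOfPipe, vk::PipelineStageFlagBits::eTransfer,
//                        {}, nullptr, nullptr, barrier );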
  64838. template <typename Dispatch>
  64839. VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV* pGeneratedCommandsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64840. {
  64841. d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( pGeneratedCommandsInfo ) );
  64842. }
  64843. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64844. template <typename Dispatch>
  64845. VULKAN_HPP_INLINE void CommandBuffer::preprocessGeneratedCommandsNV( const GeneratedCommandsInfoNV & generatedCommandsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64846. {
  64847. d.vkCmdPreprocessGeneratedCommandsNV( m_commandBuffer, reinterpret_cast<const VkGeneratedCommandsInfoNV *>( &generatedCommandsInfo ) );
  64848. }
  64849. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64850. template <typename Dispatch>
  64851. VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64852. {
  64853. d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
  64854. }
  64855. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64856. template <typename T, typename Dispatch>
  64857. VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, ArrayProxy<const T> const & values, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64858. {
  64859. d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( T ), reinterpret_cast<const void *>( values.data() ) );
  64860. }
  64861. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
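// Illustrative usage sketch (not part of the generated header): the templated overload above
// computes the byte size as values.size() * sizeof( T ), so a single struct can be pushed
// directly. `cmd` and `layout` (a vk::PipelineLayout) are assumed; the offset and sizeof( T )
// must be multiples of 4, as required by vkCmdPushConstants.
//
//   struct PushData { float time; uint32_t frame; };
//   PushData pd{ 1.5f, 42 };
//   cmd.pushConstants<PushData>( layout, vk::ShaderStageFlagBits::eVertex, 0, pd );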
  64862. template <typename Dispatch>
  64863. VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64864. {
  64865. d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) );
  64866. }
  64867. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64868. template <typename Dispatch>
  64869. VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64870. {
  64871. d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWrites.size(), reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) );
  64872. }
  64873. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64874. template <typename Dispatch>
  64875. VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void* pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64876. {
  64877. d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData );
  64878. }
  64879. template <typename Dispatch>
  64880. VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64881. {
  64882. d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
  64883. }
  64884. template <typename Dispatch>
  64885. VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64886. {
  64887. d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  64888. }
  64889. template <typename Dispatch>
  64890. VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve* pRegions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64891. {
  64892. d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve *>( pRegions ) );
  64893. }
  64894. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64895. template <typename Dispatch>
  64896. VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64897. {
  64898. d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageResolve *>( regions.data() ) );
  64899. }
  64900. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64901. template <typename Dispatch>
  64902. VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR* pResolveImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64903. {
  64904. d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2KHR *>( pResolveImageInfo ) );
  64905. }
  64906. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64907. template <typename Dispatch>
  64908. VULKAN_HPP_INLINE void CommandBuffer::resolveImage2KHR( const ResolveImageInfo2KHR & resolveImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64909. {
  64910. d.vkCmdResolveImage2KHR( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2KHR *>( &resolveImageInfo ) );
  64911. }
  64912. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setCheckpointNV( const void* pCheckpointMarker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetCheckpointNV( m_commandBuffer, pCheckpointMarker );
}
  64923. template <typename Dispatch>
  64924. VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV* pCustomSampleOrders, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64925. {
  64926. d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrderCount, reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( pCustomSampleOrders ) );
  64927. }
  64928. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64929. template <typename Dispatch>
  64930. VULKAN_HPP_INLINE void CommandBuffer::setCoarseSampleOrderNV( VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV sampleOrderType, ArrayProxy<const VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV> const & customSampleOrders, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64931. {
  64932. d.vkCmdSetCoarseSampleOrderNV( m_commandBuffer, static_cast<VkCoarseSampleOrderTypeNV>( sampleOrderType ), customSampleOrders.size(), reinterpret_cast<const VkCoarseSampleOrderCustomNV *>( customSampleOrders.data() ) );
  64933. }
  64934. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setCullModeEXT( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetCullModeEXT( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
}
  64950. template <typename Dispatch>
  64951. VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64952. {
  64953. d.vkCmdSetDepthBoundsTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) );
  64954. }
  64955. template <typename Dispatch>
  64956. VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOpEXT( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64957. {
  64958. d.vkCmdSetDepthCompareOpEXT( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) );
  64959. }
  64960. template <typename Dispatch>
  64961. VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64962. {
  64963. d.vkCmdSetDepthTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) );
  64964. }
  64965. template <typename Dispatch>
  64966. VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64967. {
  64968. d.vkCmdSetDepthWriteEnableEXT( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) );
  64969. }
  64970. template <typename Dispatch>
  64971. VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64972. {
  64973. d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
  64974. }
  64975. template <typename Dispatch>
  64976. VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64977. {
  64978. d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask );
  64979. }
  64980. template <typename Dispatch>
  64981. VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D* pDiscardRectangles, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64982. {
  64983. d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) );
  64984. }
  64985. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  64986. template <typename Dispatch>
  64987. VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64988. {
  64989. d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) );
  64990. }
  64991. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  64992. template <typename Dispatch>
  64993. VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64994. {
  64995. d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
  64996. }
  64997. template <typename Dispatch>
  64998. VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pExclusiveScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  64999. {
  65000. d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissorCount, reinterpret_cast<const VkRect2D *>( pExclusiveScissors ) );
  65001. }
  65002. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65003. template <typename Dispatch>
  65004. VULKAN_HPP_INLINE void CommandBuffer::setExclusiveScissorNV( uint32_t firstExclusiveScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & exclusiveScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65005. {
  65006. d.vkCmdSetExclusiveScissorNV( m_commandBuffer, firstExclusiveScissor, exclusiveScissors.size(), reinterpret_cast<const VkRect2D *>( exclusiveScissors.data() ) );
  65007. }
  65008. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65009. template <typename Dispatch>
  65010. VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateEnumNV( VULKAN_HPP_NAMESPACE::FragmentShadingRateNV shadingRate, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65011. {
  65012. d.vkCmdSetFragmentShadingRateEnumNV( m_commandBuffer, static_cast<VkFragmentShadingRateNV>( shadingRate ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR*>( combinerOps ) );
  65013. }
  65014. template <typename Dispatch>
  65015. VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const VULKAN_HPP_NAMESPACE::Extent2D* pFragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65016. {
  65017. d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer, reinterpret_cast<const VkExtent2D *>( pFragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR*>( combinerOps ) );
  65018. }
  65019. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65020. template <typename Dispatch>
  65021. VULKAN_HPP_INLINE void CommandBuffer::setFragmentShadingRateKHR( const Extent2D & fragmentSize, const VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR combinerOps[2], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65022. {
  65023. d.vkCmdSetFragmentShadingRateKHR( m_commandBuffer, reinterpret_cast<const VkExtent2D *>( &fragmentSize ), reinterpret_cast<const VkFragmentShadingRateCombinerOpKHR *>( combinerOps ) );
  65024. }
  65025. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
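// Illustrative usage sketch (not part of the generated header): requesting a 2x2 pipeline
// fragment shading rate with "keep" combiners, assuming VK_KHR_fragment_shading_rate is enabled
// and the bound pipeline declares the corresponding dynamic state. `cmd` is assumed to exist.
//
//   const vk::FragmentShadingRateCombinerOpKHR combinerOps[2] = { vk::FragmentShadingRateCombinerOpKHR::eKeep,
//                                                                 vk::FragmentShadingRateCombinerOpKHR::eKeep };
//   cmd.setFragmentShadingRateKHR( vk::Extent2D( 2, 2 ), combinerOps );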
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setFrontFaceEXT( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetFrontFaceEXT( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setLineStippleEXT( uint32_t lineStippleFactor, uint16_t lineStipplePattern, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetLineStippleEXT( m_commandBuffer, lineStippleFactor, lineStipplePattern );
}
template <typename Dispatch>
VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
}
  65041. template <typename Dispatch>
  65042. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL* pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65043. {
  65044. return static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( pMarkerInfo ) ) );
  65045. }
  65046. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65047. template <typename Dispatch>
  65048. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceMarkerINTEL( const PerformanceMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
  65049. {
  65050. Result result = static_cast<Result>( d.vkCmdSetPerformanceMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceMarkerInfoINTEL *>( &markerInfo ) ) );
  65051. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceMarkerINTEL" );
  65052. }
  65053. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65054. template <typename Dispatch>
  65055. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceOverrideINTEL( const VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL* pOverrideInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65056. {
  65057. return static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( pOverrideInfo ) ) );
  65058. }
  65059. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65060. template <typename Dispatch>
  65061. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceOverrideINTEL( const PerformanceOverrideInfoINTEL & overrideInfo, Dispatch const & d ) const
  65062. {
  65063. Result result = static_cast<Result>( d.vkCmdSetPerformanceOverrideINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceOverrideInfoINTEL *>( &overrideInfo ) ) );
  65064. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceOverrideINTEL" );
  65065. }
  65066. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65067. template <typename Dispatch>
  65068. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::setPerformanceStreamMarkerINTEL( const VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL* pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65069. {
  65070. return static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( pMarkerInfo ) ) );
  65071. }
  65072. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65073. template <typename Dispatch>
  65074. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::setPerformanceStreamMarkerINTEL( const PerformanceStreamMarkerInfoINTEL & markerInfo, Dispatch const & d ) const
  65075. {
  65076. Result result = static_cast<Result>( d.vkCmdSetPerformanceStreamMarkerINTEL( m_commandBuffer, reinterpret_cast<const VkPerformanceStreamMarkerInfoINTEL *>( &markerInfo ) ) );
  65077. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::setPerformanceStreamMarkerINTEL" );
  65078. }
  65079. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65080. template <typename Dispatch>
  65081. VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopologyEXT( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65082. {
  65083. d.vkCmdSetPrimitiveTopologyEXT( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) );
  65084. }
  65085. template <typename Dispatch>
  65086. VULKAN_HPP_INLINE void CommandBuffer::setRayTracingPipelineStackSizeKHR( uint32_t pipelineStackSize, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65087. {
  65088. d.vkCmdSetRayTracingPipelineStackSizeKHR( m_commandBuffer, pipelineStackSize );
  65089. }
  65090. template <typename Dispatch>
  65091. VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT* pSampleLocationsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65092. {
  65093. d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( pSampleLocationsInfo ) );
  65094. }
  65095. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65096. template <typename Dispatch>
  65097. VULKAN_HPP_INLINE void CommandBuffer::setSampleLocationsEXT( const SampleLocationsInfoEXT & sampleLocationsInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65098. {
  65099. d.vkCmdSetSampleLocationsEXT( m_commandBuffer, reinterpret_cast<const VkSampleLocationsInfoEXT *>( &sampleLocationsInfo ) );
  65100. }
  65101. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65102. template <typename Dispatch>
  65103. VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65104. {
  65105. d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
  65106. }
  65107. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65108. template <typename Dispatch>
  65109. VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65110. {
  65111. d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
  65112. }
  65113. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65114. template <typename Dispatch>
  65115. VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D* pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65116. {
  65117. d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
  65118. }
  65119. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65120. template <typename Dispatch>
  65121. VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65122. {
  65123. d.vkCmdSetScissorWithCountEXT( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
  65124. }
  65125. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65126. template <typename Dispatch>
  65127. VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65128. {
  65129. d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
  65130. }
  65131. template <typename Dispatch>
  65132. VULKAN_HPP_INLINE void CommandBuffer::setStencilOpEXT( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65133. {
  65134. d.vkCmdSetStencilOpEXT( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), static_cast<VkStencilOp>( failOp ), static_cast<VkStencilOp>( passOp ), static_cast<VkStencilOp>( depthFailOp ), static_cast<VkCompareOp>( compareOp ) );
  65135. }
  65136. template <typename Dispatch>
  65137. VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65138. {
  65139. d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
  65140. }
  65141. template <typename Dispatch>
  65142. VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnableEXT( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65143. {
  65144. d.vkCmdSetStencilTestEnableEXT( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) );
  65145. }
  65146. template <typename Dispatch>
  65147. VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65148. {
  65149. d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
  65150. }
  65151. template <typename Dispatch>
  65152. VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65153. {
  65154. d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
  65155. }
  65156. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65157. template <typename Dispatch>
  65158. VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65159. {
  65160. d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
  65161. }
  65162. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65163. template <typename Dispatch>
  65164. VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV* pShadingRatePalettes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65165. {
  65166. d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkShadingRatePaletteNV *>( pShadingRatePalettes ) );
  65167. }
  65168. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65169. template <typename Dispatch>
  65170. VULKAN_HPP_INLINE void CommandBuffer::setViewportShadingRatePaletteNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV> const & shadingRatePalettes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65171. {
  65172. d.vkCmdSetViewportShadingRatePaletteNV( m_commandBuffer, firstViewport, shadingRatePalettes.size(), reinterpret_cast<const VkShadingRatePaletteNV *>( shadingRatePalettes.data() ) );
  65173. }
  65174. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65175. template <typename Dispatch>
  65176. VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV* pViewportWScalings, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65177. {
  65178. d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) );
  65179. }
  65180. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65181. template <typename Dispatch>
  65182. VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65183. {
  65184. d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size(), reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) );
  65185. }
  65186. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65187. template <typename Dispatch>
  65188. VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport* pViewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65189. {
  65190. d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
  65191. }
  65192. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65193. template <typename Dispatch>
  65194. VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCountEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65195. {
  65196. d.vkCmdSetViewportWithCountEXT( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
  65197. }
  65198. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
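// Illustrative usage sketch (not part of the generated header): setting viewport and scissor
// counts dynamically, assuming VK_EXT_extended_dynamic_state is enabled and the pipeline was
// created with the eViewportWithCountEXT / eScissorWithCountEXT dynamic states. `cmd` and
// `extent` (a vk::Extent2D) are assumed to exist.
//
//   vk::Viewport viewport( 0.0f, 0.0f, static_cast<float>( extent.width ), static_cast<float>( extent.height ), 0.0f, 1.0f );
//   vk::Rect2D   scissor( vk::Offset2D( 0, 0 ), extent );
//   cmd.setViewportWithCountEXT( viewport );   // viewportCount is taken from the proxy (1 here)
//   cmd.setScissorWithCountEXT( scissor );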
  65199. template <typename Dispatch>
  65200. VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pCallableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65201. {
  65202. d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ), static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
  65203. }
  65204. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65205. template <typename Dispatch>
  65206. VULKAN_HPP_INLINE void CommandBuffer::traceRaysIndirectKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const StridedDeviceAddressRegionKHR & missShaderBindingTable, const StridedDeviceAddressRegionKHR & hitShaderBindingTable, const StridedDeviceAddressRegionKHR & callableShaderBindingTable, VULKAN_HPP_NAMESPACE::DeviceAddress indirectDeviceAddress, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65207. {
  65208. d.vkCmdTraceRaysIndirectKHR( m_commandBuffer, reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ), static_cast<VkDeviceAddress>( indirectDeviceAddress ) );
  65209. }
  65210. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65211. template <typename Dispatch>
  65212. VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65213. {
  65214. d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pRaygenShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pMissShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pHitShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( pCallableShaderBindingTable ), width, height, depth );
  65215. }
  65216. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65217. template <typename Dispatch>
  65218. VULKAN_HPP_INLINE void CommandBuffer::traceRaysKHR( const StridedDeviceAddressRegionKHR & raygenShaderBindingTable, const StridedDeviceAddressRegionKHR & missShaderBindingTable, const StridedDeviceAddressRegionKHR & hitShaderBindingTable, const StridedDeviceAddressRegionKHR & callableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65219. {
  65220. d.vkCmdTraceRaysKHR( m_commandBuffer, reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &raygenShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &missShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &hitShaderBindingTable ), reinterpret_cast<const VkStridedDeviceAddressRegionKHR *>( &callableShaderBindingTable ), width, height, depth );
  65221. }
  65222. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
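// Illustrative usage sketch (not part of the generated header): dispatching a ray tracing pass
// with the reference overload above. Each shader binding table region is described by device
// address, stride and size; `raygenAddress`, `missAddress`, `hitAddress`, `handleSizeAligned`,
// `cmd` and `extent` are assumptions standing in for application-specific SBT setup.
//
//   vk::StridedDeviceAddressRegionKHR raygenRegion( raygenAddress, handleSizeAligned, handleSizeAligned );
//   vk::StridedDeviceAddressRegionKHR missRegion( missAddress, handleSizeAligned, handleSizeAligned );
//   vk::StridedDeviceAddressRegionKHR hitRegion( hitAddress, handleSizeAligned, handleSizeAligned );
//   vk::StridedDeviceAddressRegionKHR callableRegion{};   // no callable shaders in this sketch
//   cmd.traceRaysKHR( raygenRegion, missRegion, hitRegion, callableRegion, extent.width, extent.height, 1 );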
  65223. template <typename Dispatch>
  65224. VULKAN_HPP_INLINE void CommandBuffer::traceRaysNV( VULKAN_HPP_NAMESPACE::Buffer raygenShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize raygenShaderBindingOffset, VULKAN_HPP_NAMESPACE::Buffer missShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize missShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer hitShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize hitShaderBindingStride, VULKAN_HPP_NAMESPACE::Buffer callableShaderBindingTableBuffer, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingOffset, VULKAN_HPP_NAMESPACE::DeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65225. {
  65226. d.vkCmdTraceRaysNV( m_commandBuffer, static_cast<VkBuffer>( raygenShaderBindingTableBuffer ), static_cast<VkDeviceSize>( raygenShaderBindingOffset ), static_cast<VkBuffer>( missShaderBindingTableBuffer ), static_cast<VkDeviceSize>( missShaderBindingOffset ), static_cast<VkDeviceSize>( missShaderBindingStride ), static_cast<VkBuffer>( hitShaderBindingTableBuffer ), static_cast<VkDeviceSize>( hitShaderBindingOffset ), static_cast<VkDeviceSize>( hitShaderBindingStride ), static_cast<VkBuffer>( callableShaderBindingTableBuffer ), static_cast<VkDeviceSize>( callableShaderBindingOffset ), static_cast<VkDeviceSize>( callableShaderBindingStride ), width, height, depth );
  65227. }
  65228. template <typename Dispatch>
  65229. VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void* pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65230. {
  65231. d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData );
  65232. }
  65233. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65234. template <typename T, typename Dispatch>
  65235. VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, ArrayProxy<const T> const & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65236. {
  65237. d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), data.size() * sizeof( T ), reinterpret_cast<const void *>( data.data() ) );
  65238. }
  65239. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
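// Illustrative usage sketch (not part of the generated header): the templated overload above
// computes dataSize as data.size() * sizeof( T ), so small, 4-byte-aligned updates (at most
// 65536 bytes, per vkCmdUpdateBuffer) can be written inline. `cmd` and `uniformBuffer` are
// assumed to exist.
//
//   std::array<float, 4> color = { 1.0f, 0.0f, 0.0f, 1.0f };
//   cmd.updateBuffer<float>( uniformBuffer, 0, color );   // writes 16 bytes at offset 0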
  65240. template <typename Dispatch>
  65241. VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event* pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier* pImageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65242. {
  65243. d.vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
  65244. }
  65245. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65246. template <typename Dispatch>
  65247. VULKAN_HPP_INLINE void CommandBuffer::waitEvents( ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65248. {
  65249. d.vkCmdWaitEvents( m_commandBuffer, events.size(), reinterpret_cast<const VkEvent *>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size(), reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), bufferMemoryBarriers.size(), reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size(), reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
  65250. }
  65251. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65252. template <typename Dispatch>
  65253. VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65254. {
  65255. d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
  65256. }
  65257. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65258. template <typename Dispatch>
  65259. VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65260. {
  65261. d.vkCmdWriteAccelerationStructuresPropertiesKHR( m_commandBuffer, accelerationStructures.size(), reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
  65262. }
  65263. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65264. template <typename Dispatch>
  65265. VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65266. {
  65267. d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureNV *>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
  65268. }
  65269. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65270. template <typename Dispatch>
  65271. VULKAN_HPP_INLINE void CommandBuffer::writeAccelerationStructuresPropertiesNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureNV> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65272. {
  65273. d.vkCmdWriteAccelerationStructuresPropertiesNV( m_commandBuffer, accelerationStructures.size(), reinterpret_cast<const VkAccelerationStructureNV *>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), static_cast<VkQueryPool>( queryPool ), firstQuery );
  65274. }
  65275. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65276. template <typename Dispatch>
  65277. VULKAN_HPP_INLINE void CommandBuffer::writeBufferMarkerAMD( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, uint32_t marker, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65278. {
  65279. d.vkCmdWriteBufferMarkerAMD( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), marker );
  65280. }
  65281. template <typename Dispatch>
  65282. VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65283. {
  65284. d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
  65285. }
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
}
#else
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end( Dispatch const & d ) const
{
Result result = static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
}
#else
template <typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const
{
Result result = static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
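// Illustrative usage sketch (not part of the generated header): in the default configuration
// (enhanced mode with exceptions) end() returns void and throws vk::SystemError on failure,
// while with VULKAN_HPP_DISABLE_ENHANCED_MODE it returns a vk::Result that must be checked.
// reset() additionally requires the pool to have been created with
// vk::CommandPoolCreateFlagBits::eResetCommandBuffer. `cmd` is assumed to exist.
//
//   cmd.end();                                    // default configuration: throws on error
//
//   vk::Result r = cmd.end();                     // VULKAN_HPP_DISABLE_ENHANCED_MODE
//   if ( r != vk::Result::eSuccess ) { /* handle the error */ }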
  65314. #ifdef VK_USE_PLATFORM_WIN32_KHR
  65315. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65316. template <typename Dispatch>
  65317. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65318. {
  65319. return static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  65320. }
  65321. #else
  65322. template <typename Dispatch>
  65323. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  65324. {
  65325. Result result = static_cast<Result>( d.vkAcquireFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  65326. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireFullScreenExclusiveModeEXT" );
  65327. }
  65328. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65329. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  65330. template <typename Dispatch>
  65331. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65332. {
  65333. return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) );
  65334. }
  65335. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65336. template <typename Dispatch>
  65337. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const AcquireNextImageInfoKHR & acquireInfo, Dispatch const & d ) const
  65338. {
  65339. uint32_t imageIndex;
  65340. Result result = static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex ) );
  65341. return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eNotReady, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  65342. }
  65343. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65344. template <typename Dispatch>
  65345. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t* pImageIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65346. {
  65347. return static_cast<Result>( d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) );
  65348. }
  65349. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65350. template <typename Dispatch>
  65351. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
  65352. {
  65353. uint32_t imageIndex;
  65354. Result result = static_cast<Result>( d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex ) );
  65355. return createResultValue( result, imageIndex, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eNotReady, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  65356. }
  65357. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
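// Illustrative usage sketch (not part of the generated header): because acquireNextImageKHR has
// several success codes, the enhanced overload above returns a ResultValue<uint32_t> instead of
// unwrapping the value; error codes such as eErrorOutOfDateKHR still throw when exceptions are
// enabled. `device`, `swapchain` and `imageAvailableSemaphore` are assumed to exist.
//
//   vk::ResultValue<uint32_t> rv = device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailableSemaphore );
//   uint32_t imageIndex = rv.value;
//   if ( rv.result == vk::Result::eSuboptimalKHR )
//   {
//     // the swapchain can still be presented to, but recreating it is advisable
//   }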
  65358. template <typename Dispatch>
  65359. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquirePerformanceConfigurationINTEL( const VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL* pConfiguration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65360. {
  65361. return static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( pAcquireInfo ), reinterpret_cast< VkPerformanceConfigurationINTEL *>( pConfiguration ) ) );
  65362. }
  65363. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65364. template <typename Dispatch>
  65365. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::type Device::acquirePerformanceConfigurationINTEL( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const
  65366. {
  65367. VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
  65368. Result result = static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ), reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) );
  65369. return createResultValue( result, configuration, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTEL" );
  65370. }
  65371. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  65372. template <typename Dispatch>
  65373. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>>::type Device::acquirePerformanceConfigurationINTELUnique( const PerformanceConfigurationAcquireInfoINTEL & acquireInfo, Dispatch const & d ) const
  65374. {
  65375. VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration;
  65376. Result result = static_cast<Result>( d.vkAcquirePerformanceConfigurationINTEL( m_device, reinterpret_cast<const VkPerformanceConfigurationAcquireInfoINTEL *>( &acquireInfo ), reinterpret_cast<VkPerformanceConfigurationINTEL *>( &configuration ) ) );
  65377. ObjectRelease<Device, Dispatch> deleter( *this, d );
  65378. return createResultValue<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL, Dispatch>( result, configuration, VULKAN_HPP_NAMESPACE_STRING "::Device::acquirePerformanceConfigurationINTELUnique", deleter );
  65379. }
  65380. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  65381. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65382. template <typename Dispatch>
  65383. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65384. {
  65385. return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) );
  65386. }
  65387. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65388. template <typename Dispatch>
  65389. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireProfilingLockKHR( const AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const
  65390. {
  65391. Result result = static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) ) );
  65392. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" );
  65393. }
  65394. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65395. template <typename Dispatch>
  65396. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65397. {
  65398. return static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), reinterpret_cast< VkCommandBuffer *>( pCommandBuffers ) ) );
  65399. }
  65400. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65401. template <typename CommandBufferAllocator, typename Dispatch>
  65402. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
  65403. {
  65404. std::vector<CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount );
  65405. Result result = static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
  65406. return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
  65407. }
  65408. template <typename CommandBufferAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, CommandBuffer>::value, int>::type >
  65409. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<CommandBuffer, CommandBufferAllocator>>::type Device::allocateCommandBuffers( const CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d ) const
  65410. {
  65411. std::vector<CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount, commandBufferAllocator );
  65412. Result result = static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
  65413. return createResultValue( result, commandBuffers, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
  65414. }
  65415. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  65416. template <typename Dispatch, typename CommandBufferAllocator>
  65417. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
  65418. {
  65419. std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers;
  65420. std::vector<CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
  65421. Result result = static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
  65422. if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
  65423. {
  65424. uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
  65425. PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
  65426. for ( size_t i=0; i < allocateInfo.commandBufferCount; i++ )
  65427. {
  65428. uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffers[i], deleter ) );
  65429. }
  65430. }
  65431. return createResultValue( result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
  65432. }
  65433. template <typename Dispatch, typename CommandBufferAllocator, typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value, int>::type >
  65434. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator>>::type Device::allocateCommandBuffersUnique( const CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d ) const
  65435. {
  65436. std::vector<UniqueHandle<CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator );
  65437. std::vector<CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
  65438. Result result = static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ) );
  65439. if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
  65440. {
  65441. uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
  65442. PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
  65443. for ( size_t i=0; i < allocateInfo.commandBufferCount; i++ )
  65444. {
  65445. uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffers[i], deleter ) );
  65446. }
  65447. }
  65448. return createResultValue( result, std::move( uniqueCommandBuffers ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
  65449. }
  65450. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  65451. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
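// -- Illustrative usage sketch (not part of the generated header) -----------
// Assuming the default `vk` namespace and hypothetical handles `device` and
// `commandPool` created elsewhere, the enhanced-mode wrappers above can be
// used roughly like this:
//
//   vk::CommandBufferAllocateInfo allocInfo( commandPool, vk::CommandBufferLevel::ePrimary, 2 );
//   std::vector<vk::CommandBuffer> commandBuffers = device.allocateCommandBuffers( allocInfo );
//   auto uniqueCommandBuffers = device.allocateCommandBuffersUnique( allocInfo );
//
// The *Unique variant wraps each handle in a UniqueHandle whose PoolFree deleter
// returns it to allocInfo.commandPool when the handle goes out of scope.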
  65452. template <typename Dispatch>
  65453. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo* pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65454. {
  65455. return static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), reinterpret_cast< VkDescriptorSet *>( pDescriptorSets ) ) );
  65456. }
  65457. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65458. template <typename DescriptorSetAllocator, typename Dispatch>
  65459. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
  65460. {
  65461. std::vector<DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount );
  65462. Result result = static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
  65463. return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
  65464. }
  65465. template <typename DescriptorSetAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DescriptorSet>::value, int>::type >
  65466. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<DescriptorSet, DescriptorSetAllocator>>::type Device::allocateDescriptorSets( const DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d ) const
  65467. {
  65468. std::vector<DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator );
  65469. Result result = static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
  65470. return createResultValue( result, descriptorSets, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
  65471. }
  65472. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  65473. template <typename Dispatch, typename DescriptorSetAllocator>
  65474. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
  65475. {
  65476. std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets;
  65477. std::vector<DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
  65478. Result result = static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
  65479. if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
  65480. {
  65481. uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
  65482. PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
  65483. for ( size_t i=0; i < allocateInfo.descriptorSetCount; i++ )
  65484. {
  65485. uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSets[i], deleter ) );
  65486. }
  65487. }
  65488. return createResultValue( result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
  65489. }
  65490. template <typename Dispatch, typename DescriptorSetAllocator, typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value, int>::type >
  65491. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type Device::allocateDescriptorSetsUnique( const DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d ) const
  65492. {
  65493. std::vector<UniqueHandle<DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator );
  65494. std::vector<DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
  65495. Result result = static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ) );
  65496. if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
  65497. {
  65498. uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
  65499. PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
  65500. for ( size_t i=0; i < allocateInfo.descriptorSetCount; i++ )
  65501. {
  65502. uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSets[i], deleter ) );
  65503. }
  65504. }
  65505. return createResultValue( result, std::move( uniqueDescriptorSets ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
  65506. }
  65507. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  65508. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
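// -- Illustrative usage sketch (not part of the generated header) -----------
// A hedged example for the wrappers above; `device`, `descriptorPool` and
// `setLayout` are hypothetical handles created elsewhere:
//
//   vk::DescriptorSetAllocateInfo allocInfo( descriptorPool, 1, &setLayout );
//   std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );
//
// allocateDescriptorSetsUnique behaves the same way but returns UniqueHandles
// whose PoolFree deleter hands the sets back to allocInfo.descriptorPool; that
// is only valid if the pool was created with the eFreeDescriptorSet flag.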
  65509. template <typename Dispatch>
  65510. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo* pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory* pMemory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65511. {
  65512. return static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDeviceMemory *>( pMemory ) ) );
  65513. }
  65514. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65515. template <typename Dispatch>
  65516. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type Device::allocateMemory( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  65517. {
  65518. VULKAN_HPP_NAMESPACE::DeviceMemory memory;
  65519. Result result = static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeviceMemory *>( &memory ) ) );
  65520. return createResultValue( result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" );
  65521. }
  65522. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  65523. template <typename Dispatch>
  65524. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type Device::allocateMemoryUnique( const MemoryAllocateInfo & allocateInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  65525. {
  65526. VULKAN_HPP_NAMESPACE::DeviceMemory memory;
  65527. Result result = static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeviceMemory *>( &memory ) ) );
  65528. ObjectFree<Device, Dispatch> deleter( *this, allocator, d );
  65529. return createResultValue<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>( result, memory, VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique", deleter );
  65530. }
  65531. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  65532. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
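// -- Illustrative usage sketch (not part of the generated header) -----------
// Assuming hypothetical values `memRequirements` (a vk::MemoryRequirements) and
// `memoryTypeIndex` obtained elsewhere:
//
//   vk::MemoryAllocateInfo allocInfo( memRequirements.size, memoryTypeIndex );
//   vk::DeviceMemory memory      = device.allocateMemory( allocInfo );
//   vk::UniqueDeviceMemory owned = device.allocateMemoryUnique( allocInfo );
//
// The Unique variant attaches an ObjectFree deleter, so the allocation is
// released via vkFreeMemory when `owned` is destroyed.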
  65533. template <typename Dispatch>
  65534. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindAccelerationStructureMemoryNV( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV* pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65535. {
  65536. return static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfoCount, reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( pBindInfos ) ) );
  65537. }
  65538. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65539. template <typename Dispatch>
  65540. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindAccelerationStructureMemoryNV( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV> const & bindInfos, Dispatch const & d ) const
  65541. {
  65542. Result result = static_cast<Result>( d.vkBindAccelerationStructureMemoryNV( m_device, bindInfos.size(), reinterpret_cast<const VkBindAccelerationStructureMemoryInfoNV *>( bindInfos.data() ) ) );
  65543. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindAccelerationStructureMemoryNV" );
  65544. }
  65545. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65546. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65547. template <typename Dispatch>
  65548. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65549. {
  65550. return static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
  65551. }
  65552. #else
  65553. template <typename Dispatch>
  65554. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
  65555. {
  65556. Result result = static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
  65557. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" );
  65558. }
  65559. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
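// -- Illustrative usage sketch (not part of the generated header) -----------
// With enhanced mode enabled the wrapper above returns void and throws on
// failure, so binding a hypothetical `buffer` to `memory` at offset 0 is just:
//
//   device.bindBufferMemory( buffer, memory, 0 );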
  65560. template <typename Dispatch>
  65561. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65562. {
  65563. return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
  65564. }
  65565. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65566. template <typename Dispatch>
  65567. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const
  65568. {
  65569. Result result = static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
  65570. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
  65571. }
  65572. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65573. template <typename Dispatch>
  65574. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo* pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65575. {
  65576. return static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
  65577. }
  65578. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65579. template <typename Dispatch>
  65580. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const
  65581. {
  65582. Result result = static_cast<Result>( d.vkBindBufferMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ) );
  65583. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2KHR" );
  65584. }
  65585. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65586. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65587. template <typename Dispatch>
  65588. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65589. {
  65590. return static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
  65591. }
  65592. #else
  65593. template <typename Dispatch>
  65594. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
  65595. {
  65596. Result result = static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
  65597. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" );
  65598. }
  65599. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65600. template <typename Dispatch>
  65601. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65602. {
  65603. return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
  65604. }
  65605. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65606. template <typename Dispatch>
  65607. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory2( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const
  65608. {
  65609. Result result = static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
  65610. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
  65611. }
  65612. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
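// -- Illustrative usage sketch (not part of the generated header) -----------
// The ArrayProxy overload above also accepts a single element, so binding one
// hypothetical `image` to `memory` through the core 1.1 entry point can look like:
//
//   vk::BindImageMemoryInfo bindInfo( image, memory, 0 );
//   device.bindImageMemory2( bindInfo );
//
// Several BindImageMemoryInfo structures can be passed at once as an
// initializer list or a std::vector.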
  65613. template <typename Dispatch>
  65614. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2KHR( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo* pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65615. {
  65616. return static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
  65617. }
  65618. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65619. template <typename Dispatch>
  65620. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory2KHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const
  65621. {
  65622. Result result = static_cast<Result>( d.vkBindImageMemory2KHR( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ) );
  65623. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2KHR" );
  65624. }
  65625. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65626. template <typename Dispatch>
  65627. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, uint32_t infoCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pInfos, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR* const * ppBuildRangeInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65628. {
  65629. return static_cast<Result>( d.vkBuildAccelerationStructuresKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infoCount, reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pInfos ), reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( ppBuildRangeInfos ) ) );
  65630. }
  65631. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65632. template <typename Dispatch>
  65633. VULKAN_HPP_INLINE Result Device::buildAccelerationStructuresKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR> const & infos, ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR* const > const & pBuildRangeInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  65634. {
  65635. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  65636. VULKAN_HPP_ASSERT( infos.size() == pBuildRangeInfos.size() );
  65637. #else
  65638. if ( infos.size() != pBuildRangeInfos.size() )
  65639. {
  65640. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR: infos.size() != pBuildRangeInfos.size()" );
  65641. }
  65642. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  65643. Result result = static_cast<Result>( d.vkBuildAccelerationStructuresKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), infos.size(), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( infos.data() ), reinterpret_cast<const VkAccelerationStructureBuildRangeInfoKHR * const *>( pBuildRangeInfos.data() ) ) );
  65644. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::buildAccelerationStructuresKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  65645. }
  65646. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
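// -- Illustrative usage sketch (not part of the generated header) -----------
// A heavily abbreviated, hypothetical example; `buildInfo` and `rangeInfo` are
// assumed to be fully populated elsewhere (geometry, scratch and destination
// addresses set up), and no deferred operation is used:
//
//   const vk::AccelerationStructureBuildRangeInfoKHR * pRangeInfo = &rangeInfo;
//   vk::Result result = device.buildAccelerationStructuresKHR( nullptr, buildInfo, pRangeInfo );
//
// Note that the wrapper checks infos.size() == pBuildRangeInfos.size() and that
// eOperationDeferredKHR and eOperationNotDeferredKHR also count as success
// codes, so the returned vk::Result still needs to be inspected by the caller.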
  65647. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65648. template <typename Dispatch>
  65649. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65650. {
  65651. return static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
  65652. }
  65653. #else
  65654. template <typename Dispatch>
  65655. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::compileDeferredNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t shader, Dispatch const & d ) const
  65656. {
  65657. Result result = static_cast<Result>( d.vkCompileDeferredNV( m_device, static_cast<VkPipeline>( pipeline ), shader ) );
  65658. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::compileDeferredNV" );
  65659. }
  65660. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65661. template <typename Dispatch>
  65662. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65663. {
  65664. return static_cast<Result>( d.vkCopyAccelerationStructureKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( pInfo ) ) );
  65665. }
  65666. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65667. template <typename Dispatch>
  65668. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const CopyAccelerationStructureInfoKHR & info, Dispatch const & d ) const
  65669. {
  65670. Result result = static_cast<Result>( d.vkCopyAccelerationStructureKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureInfoKHR *>( &info ) ) );
  65671. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  65672. }
  65673. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65674. template <typename Dispatch>
  65675. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65676. {
  65677. return static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( pInfo ) ) );
  65678. }
  65679. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65680. template <typename Dispatch>
  65681. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyAccelerationStructureToMemoryKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const CopyAccelerationStructureToMemoryInfoKHR & info, Dispatch const & d ) const
  65682. {
  65683. Result result = static_cast<Result>( d.vkCopyAccelerationStructureToMemoryKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyAccelerationStructureToMemoryInfoKHR *>( &info ) ) );
  65684. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyAccelerationStructureToMemoryKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  65685. }
  65686. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65687. template <typename Dispatch>
  65688. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65689. {
  65690. return static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( pInfo ) ) );
  65691. }
  65692. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65693. template <typename Dispatch>
  65694. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::copyMemoryToAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, const CopyMemoryToAccelerationStructureInfoKHR & info, Dispatch const & d ) const
  65695. {
  65696. Result result = static_cast<Result>( d.vkCopyMemoryToAccelerationStructureKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), reinterpret_cast<const VkCopyMemoryToAccelerationStructureInfoKHR *>( &info ) ) );
  65697. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::copyMemoryToAccelerationStructureKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR } );
  65698. }
  65699. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65700. template <typename Dispatch>
  65701. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createAccelerationStructureKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructure, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65702. {
  65703. return static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkAccelerationStructureKHR *>( pAccelerationStructure ) ) );
  65704. }
  65705. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65706. template <typename Dispatch>
  65707. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::type Device::createAccelerationStructureKHR( const AccelerationStructureCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  65708. {
  65709. VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
  65710. Result result = static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) );
  65711. return createResultValue( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHR" );
  65712. }
  65713. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  65714. template <typename Dispatch>
  65715. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>>::type Device::createAccelerationStructureKHRUnique( const AccelerationStructureCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  65716. {
  65717. VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure;
  65718. Result result = static_cast<Result>( d.vkCreateAccelerationStructureKHR( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkAccelerationStructureKHR *>( &accelerationStructure ) ) );
  65719. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  65720. return createResultValue<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR, Dispatch>( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureKHRUnique", deleter );
  65721. }
  65722. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  65723. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
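// -- Illustrative usage sketch (not part of the generated header) -----------
// Assuming a hypothetical `asBuffer` (a vk::Buffer with acceleration-structure
// storage usage) and a size queried elsewhere, creating a bottom-level
// acceleration structure could look like:
//
//   vk::AccelerationStructureCreateInfoKHR asInfo(
//       {}, asBuffer, 0, accelerationStructureSize,
//       vk::AccelerationStructureTypeKHR::eBottomLevel );
//   vk::UniqueAccelerationStructureKHR blas = device.createAccelerationStructureKHRUnique( asInfo );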
  65724. template <typename Dispatch>
  65725. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createAccelerationStructureNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::AccelerationStructureNV* pAccelerationStructure, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65726. {
  65727. return static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkAccelerationStructureNV *>( pAccelerationStructure ) ) );
  65728. }
  65729. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65730. template <typename Dispatch>
  65731. VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::type Device::createAccelerationStructureNV( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  65732. {
  65733. VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
  65734. Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) );
  65735. return createResultValue( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNV" );
  65736. }
  65737. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  65738. template <typename Dispatch>
  65739. VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>>::type Device::createAccelerationStructureNVUnique( const AccelerationStructureCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  65740. {
  65741. VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure;
  65742. Result result = static_cast<Result>( d.vkCreateAccelerationStructureNV( m_device, reinterpret_cast<const VkAccelerationStructureCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkAccelerationStructureNV *>( &accelerationStructure ) ) );
  65743. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  65744. return createResultValue<VULKAN_HPP_NAMESPACE::AccelerationStructureNV, Dispatch>( result, accelerationStructure, VULKAN_HPP_NAMESPACE_STRING "::Device::createAccelerationStructureNVUnique", deleter );
  65745. }
  65746. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  65747. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65748. template <typename Dispatch>
  65749. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Buffer* pBuffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65750. {
  65751. return static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkBuffer *>( pBuffer ) ) );
  65752. }
  65753. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65754. template <typename Dispatch>
  65755. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type Device::createBuffer( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  65756. {
  65757. VULKAN_HPP_NAMESPACE::Buffer buffer;
  65758. Result result = static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBuffer *>( &buffer ) ) );
  65759. return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" );
  65760. }
  65761. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  65762. template <typename Dispatch>
  65763. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type Device::createBufferUnique( const BufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  65764. {
  65765. VULKAN_HPP_NAMESPACE::Buffer buffer;
  65766. Result result = static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBuffer *>( &buffer ) ) );
  65767. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  65768. return createResultValue<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>( result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique", deleter );
  65769. }
  65770. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  65771. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
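// -- Illustrative usage sketch (not part of the generated header) -----------
// Creating a small exclusive uniform buffer with the wrappers above (hedged,
// hypothetical size):
//
//   vk::BufferCreateInfo bufferInfo( {}, 65536, vk::BufferUsageFlagBits::eUniformBuffer,
//                                    vk::SharingMode::eExclusive );
//   vk::Buffer buffer         = device.createBuffer( bufferInfo );
//   vk::UniqueBuffer ownedBuf = device.createBufferUnique( bufferInfo );   // destroyed automatically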
  65772. template <typename Dispatch>
  65773. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::BufferView* pView, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65774. {
  65775. return static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkBufferView *>( pView ) ) );
  65776. }
  65777. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65778. template <typename Dispatch>
  65779. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type Device::createBufferView( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  65780. {
  65781. VULKAN_HPP_NAMESPACE::BufferView view;
  65782. Result result = static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBufferView *>( &view ) ) );
  65783. return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" );
  65784. }
  65785. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  65786. template <typename Dispatch>
  65787. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type Device::createBufferViewUnique( const BufferViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  65788. {
  65789. VULKAN_HPP_NAMESPACE::BufferView view;
  65790. Result result = static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkBufferView *>( &view ) ) );
  65791. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  65792. return createResultValue<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique", deleter );
  65793. }
  65794. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  65795. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  65796. template <typename Dispatch>
  65797. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::CommandPool* pCommandPool, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65798. {
  65799. return static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkCommandPool *>( pCommandPool ) ) );
  65800. }
  65801. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65802. template <typename Dispatch>
  65803. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type Device::createCommandPool( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  65804. {
  65805. VULKAN_HPP_NAMESPACE::CommandPool commandPool;
  65806. Result result = static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkCommandPool *>( &commandPool ) ) );
  65807. return createResultValue( result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" );
  65808. }
  65809. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  65810. template <typename Dispatch>
  65811. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type Device::createCommandPoolUnique( const CommandPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  65812. {
  65813. VULKAN_HPP_NAMESPACE::CommandPool commandPool;
  65814. Result result = static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkCommandPool *>( &commandPool ) ) );
  65815. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  65816. return createResultValue<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>( result, commandPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique", deleter );
  65817. }
  65818. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  65819. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
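// -- Illustrative usage sketch (not part of the generated header) -----------
// Assuming a hypothetical `graphicsQueueFamilyIndex` selected during device
// creation:
//
//   vk::CommandPoolCreateInfo poolInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer,
//                                       graphicsQueueFamilyIndex );
//   vk::UniqueCommandPool commandPool = device.createCommandPoolUnique( poolInfo );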
  65820. template <typename Dispatch>
  65821. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65822. {
  65823. return static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkPipeline *>( pPipelines ) ) );
  65824. }
  65825. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65826. template <typename PipelineAllocator, typename Dispatch>
  65827. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  65828. {
  65829. std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
  65830. Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
  65831. return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  65832. }
  65833. template <typename PipelineAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type >
  65834. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
  65835. {
  65836. std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
  65837. Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
  65838. return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  65839. }
  65840. template <typename Dispatch>
  65841. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  65842. {
  65843. Pipeline pipeline;
  65844. Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
  65845. return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  65846. }
  65847. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  65848. template <typename Dispatch, typename PipelineAllocator>
  65849. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  65850. {
  65851. std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
  65852. std::vector<Pipeline> pipelines( createInfos.size() );
  65853. Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
65854. if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
  65855. {
  65856. uniquePipelines.reserve( createInfos.size() );
  65857. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  65858. for ( size_t i=0; i < createInfos.size(); i++ )
  65859. {
  65860. uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
  65861. }
  65862. }
  65863. return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  65864. }
  65865. template <typename Dispatch, typename PipelineAllocator, typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type >
  65866. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
  65867. {
  65868. std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
  65869. std::vector<Pipeline> pipelines( createInfos.size() );
  65870. Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
65871. if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
  65872. {
  65873. uniquePipelines.reserve( createInfos.size() );
  65874. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  65875. for ( size_t i=0; i < createInfos.size(); i++ )
  65876. {
  65877. uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
  65878. }
  65879. }
  65880. return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  65881. }
  65882. template <typename Dispatch>
  65883. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const ComputePipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  65884. {
  65885. Pipeline pipeline;
  65886. Result result = static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
  65887. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  65888. return createResultValue<Pipeline, Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, deleter );
  65889. }
  65890. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  65891. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
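// -- Illustrative usage sketch (not part of the generated header) -----------
// Because ePipelineCompileRequiredEXT is an additional success code, the
// single-pipeline wrapper returns a ResultValue<vk::Pipeline> rather than the
// plain value, even when exceptions are enabled. With a hypothetical, already
// populated `computeCreateInfo` and `pipelineCache`:
//
//   vk::ResultValue<vk::Pipeline> rv = device.createComputePipeline( pipelineCache, computeCreateInfo );
//   if ( rv.result == vk::Result::eSuccess )
//   {
//     vk::Pipeline pipeline = rv.value;
//     // ... use pipeline ...
//   }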
  65892. template <typename Dispatch>
  65893. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDeferredOperationKHR( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DeferredOperationKHR* pDeferredOperation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  65894. {
  65895. return static_cast<Result>( d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDeferredOperationKHR *>( pDeferredOperation ) ) );
  65896. }
  65897. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  65898. template <typename Dispatch>
  65899. VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::type Device::createDeferredOperationKHR( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  65900. {
  65901. VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
  65902. Result result = static_cast<Result>( d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
  65903. return createResultValue( result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHR" );
  65904. }
  65905. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  65906. template <typename Dispatch>
  65907. VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>>::type Device::createDeferredOperationKHRUnique( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  65908. {
  65909. VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation;
  65910. Result result = static_cast<Result>( d.vkCreateDeferredOperationKHR( m_device, reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDeferredOperationKHR *>( &deferredOperation ) ) );
  65911. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  65912. return createResultValue<VULKAN_HPP_NAMESPACE::DeferredOperationKHR, Dispatch>( result, deferredOperation, VULKAN_HPP_NAMESPACE_STRING "::Device::createDeferredOperationKHRUnique", deleter );
  65913. }
  65914. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  65915. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool* pDescriptorPool, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDescriptorPool *>( pDescriptorPool ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type Device::createDescriptorPool( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
Result result = static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) );
return createResultValue( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type Device::createDescriptorPoolUnique( const DescriptorPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
Result result = static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>( result, descriptorPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout* pSetLayout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDescriptorSetLayout *>( pSetLayout ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type Device::createDescriptorSetLayout( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
Result result = static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) );
return createResultValue( result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type Device::createDescriptorSetLayoutUnique( const DescriptorSetLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
Result result = static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>( result, setLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplate( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type Device::createDescriptorUpdateTemplateUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
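// The KHR-suffixed variants below come from VK_KHR_descriptor_update_template and behave like the
// core Vulkan 1.1 createDescriptorUpdateTemplate overloads above; they simply dispatch through
// vkCreateDescriptorUpdateTemplateKHR for implementations that expose the extension entry point.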
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate* pDescriptorUpdateTemplate, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplateKHR( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
return createResultValue( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type Device::createDescriptorUpdateTemplateKHRUnique( const DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
Result result = static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
return createResultValue<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>( result, descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Event* pEvent, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkEvent *>( pEvent ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type Device::createEvent( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::Event event;
Result result = static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkEvent *>( &event ) ) );
return createResultValue( result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type Device::createEventUnique( const EventCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::Event event;
Result result = static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkEvent *>( &event ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
return createResultValue<VULKAN_HPP_NAMESPACE::Event, Dispatch>( result, event, VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkFence *>( pFence ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::createFence( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::Fence fence;
Result result = static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) ) );
return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::createFenceUnique( const FenceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::Fence fence;
Result result = static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer* pFramebuffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkFramebuffer *>( pFramebuffer ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type Device::createFramebuffer( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
Result result = static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) );
return createResultValue( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type Device::createFramebufferUnique( const FramebufferCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
Result result = static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFramebuffer *>( &framebuffer ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
return createResultValue<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>( result, framebuffer, VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
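// The graphics pipeline creators below build several pipelines in one call. Unlike the single-handle
// creators they return ResultValue (carrying both the Result and the payload) rather than
// ResultValueType, because ePipelineCompileRequiredEXT is treated as an additional success code.
// A minimal usage sketch (editorial, not part of the generated header), assuming the default vk
// namespace, a vk::PipelineCache named pipelineCache and a filled-in GraphicsPipelineCreateInfo:
//   auto rv = device.createGraphicsPipelineUnique( pipelineCache, graphicsPipelineCreateInfo );
//   if ( rv.result == vk::Result::eSuccess )
//   {
//     vk::UniquePipeline pipeline = std::move( rv.value );
//   }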
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkPipeline *>( pPipelines ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PipelineAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
template <typename PipelineAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type >
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
{
std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch, typename PipelineAllocator>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
std::vector<Pipeline> pipelines( createInfos.size() );
Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
uniquePipelines.reserve( createInfos.size() );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
for ( size_t i=0; i < createInfos.size(); i++ )
{
uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
}
}
return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
template <typename Dispatch, typename PipelineAllocator, typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type >
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
{
std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
std::vector<Pipeline> pipelines( createInfos.size() );
Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
uniquePipelines.reserve( createInfos.size() );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
for ( size_t i=0; i < createInfos.size(); i++ )
{
uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
}
}
return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const GraphicsPipelineCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
return createResultValue<Pipeline, Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Image* pImage, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkImage *>( pImage ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type Device::createImage( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::Image image;
Result result = static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImage *>( &image ) ) );
return createResultValue( result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type Device::createImageUnique( const ImageCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::Image image;
Result result = static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImage *>( &image ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
return createResultValue<VULKAN_HPP_NAMESPACE::Image, Dispatch>( result, image, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ImageView* pView, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkImageView *>( pView ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type Device::createImageView( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::ImageView view;
Result result = static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImageView *>( &view ) ) );
return createResultValue( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type Device::createImageViewUnique( const ImageViewCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::ImageView view;
Result result = static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkImageView *>( &view ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
return createResultValue<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>( result, view, VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createIndirectCommandsLayoutNV( const VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV* pIndirectCommandsLayout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkIndirectCommandsLayoutNV *>( pIndirectCommandsLayout ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::type Device::createIndirectCommandsLayoutNV( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) );
return createResultValue( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNV" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>>::type Device::createIndirectCommandsLayoutNVUnique( const IndirectCommandsLayoutCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout;
Result result = static_cast<Result>( d.vkCreateIndirectCommandsLayoutNV( m_device, reinterpret_cast<const VkIndirectCommandsLayoutCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkIndirectCommandsLayoutNV *>( &indirectCommandsLayout ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
return createResultValue<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV, Dispatch>( result, indirectCommandsLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createIndirectCommandsLayoutNVUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache* pPipelineCache, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkPipelineCache *>( pPipelineCache ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type Device::createPipelineCache( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) );
return createResultValue( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type Device::createPipelineCacheUnique( const PipelineCacheCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
Result result = static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
return createResultValue<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>( result, pipelineCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout* pPipelineLayout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkPipelineLayout *>( pPipelineLayout ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type Device::createPipelineLayout( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) );
return createResultValue( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type Device::createPipelineLayoutUnique( const PipelineLayoutCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
Result result = static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
return createResultValue<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>( result, pipelineLayout, VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
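// VK_EXT_private_data: a private data slot lets the application attach a 64-bit value to any Vulkan
// object. Note that, unlike most creators in this section, the enhanced-mode and Unique overloads
// below are only marked VULKAN_HPP_INLINE, without the nodiscard-when-no-exceptions annotation.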
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlotEXT( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT* pPrivateDataSlot, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkPrivateDataSlotEXT *>( pPrivateDataSlot ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>::type Device::createPrivateDataSlotEXT( const PrivateDataSlotCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot;
Result result = static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPrivateDataSlotEXT *>( &privateDataSlot ) ) );
return createResultValue( result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXT" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT, Dispatch>>::type Device::createPrivateDataSlotEXTUnique( const PrivateDataSlotCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot;
Result result = static_cast<Result>( d.vkCreatePrivateDataSlotEXT( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPrivateDataSlotEXT *>( &privateDataSlot ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
return createResultValue<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT, Dispatch>( result, privateDataSlot, VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotEXTUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::QueryPool* pQueryPool, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkQueryPool *>( pQueryPool ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type Device::createQueryPool( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::QueryPool queryPool;
Result result = static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkQueryPool *>( &queryPool ) ) );
return createResultValue( result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type Device::createQueryPoolUnique( const QueryPoolCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::QueryPool queryPool;
Result result = static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkQueryPool *>( &queryPool ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
return createResultValue<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>( result, queryPool, VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
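// VK_KHR_ray_tracing_pipeline: the creators below accept an optional DeferredOperationKHR so pipeline
// compilation can be handed to a deferred host operation. eOperationDeferredKHR,
// eOperationNotDeferredKHR and ePipelineCompileRequiredEXT are therefore accepted as additional
// success codes alongside eSuccess, and the *Unique variants only wrap the returned handles when one
// of those codes is observed.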
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkPipeline *>( pPipelines ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PipelineAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
template <typename PipelineAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type >
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> Device::createRayTracingPipelinesKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
{
std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createRayTracingPipelineKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch, typename PipelineAllocator>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> Device::createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
std::vector<Pipeline> pipelines( createInfos.size() );
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR )|| ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
uniquePipelines.reserve( createInfos.size() );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
for ( size_t i=0; i < createInfos.size(); i++ )
{
uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
}
}
return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
template <typename Dispatch, typename PipelineAllocator, typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type >
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> Device::createRayTracingPipelinesKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
{
std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
std::vector<Pipeline> pipelines( createInfos.size() );
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )|| ( result == VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR )|| ( result == VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR )|| ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
{
uniquePipelines.reserve( createInfos.size() );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
for ( size_t i=0; i < createInfos.size(); i++ )
{
uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
}
}
return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesKHRUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createRayTracingPipelineKHRUnique( VULKAN_HPP_NAMESPACE::DeferredOperationKHR deferredOperation, VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesKHR( m_device, static_cast<VkDeferredOperationKHR>( deferredOperation ), static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
return createResultValue<Pipeline, Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineKHRUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eOperationDeferredKHR, VULKAN_HPP_NAMESPACE::Result::eOperationNotDeferredKHR, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
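// VK_NV_ray_tracing: the NV ray tracing pipeline creators below mirror the graphics pipeline pattern
// above (no deferred operation parameter) and likewise treat ePipelineCompileRequiredEXT as a
// success code.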
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Pipeline* pPipelines, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkPipeline *>( pPipelines ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename PipelineAllocator, typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size() );
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
template <typename PipelineAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, Pipeline>::value, int>::type >
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<Pipeline, PipelineAllocator>> Device::createRayTracingPipelinesNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
{
std::vector<Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
return createResultValue( result, pipelines, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNV", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<Pipeline> Device::createRayTracingPipelineNV( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
Pipeline pipeline;
Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
return createResultValue( result, pipeline, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNV", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch, typename PipelineAllocator>
  66425. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66426. {
  66427. std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
  66428. std::vector<Pipeline> pipelines( createInfos.size() );
  66429. Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
  66431. {
  66432. uniquePipelines.reserve( createInfos.size() );
  66433. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
for ( size_t i = 0; i < createInfos.size(); i++ )
  66435. {
  66436. uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
  66437. }
  66438. }
  66439. return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  66440. }
  66441. template <typename Dispatch, typename PipelineAllocator, typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type >
  66442. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator>> Device::createRayTracingPipelinesNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV> const & createInfos, Optional<const AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
  66443. {
  66444. std::vector<UniqueHandle<Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
  66445. std::vector<Pipeline> pipelines( createInfos.size() );
  66446. Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ) );
if ( ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess ) || ( result == VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT ) )
  66448. {
  66449. uniquePipelines.reserve( createInfos.size() );
  66450. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
for ( size_t i = 0; i < createInfos.size(); i++ )
  66452. {
  66453. uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipelines[i], deleter ) );
  66454. }
  66455. }
  66456. return createResultValue( result, std::move( uniquePipelines ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelinesNVUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
  66457. }
  66458. template <typename Dispatch>
  66459. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<Pipeline, Dispatch>> Device::createRayTracingPipelineNVUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const RayTracingPipelineCreateInfoNV & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66460. {
  66461. Pipeline pipeline;
  66462. Result result = static_cast<Result>( d.vkCreateRayTracingPipelinesNV( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkRayTracingPipelineCreateInfoNV *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ) );
  66463. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  66464. return createResultValue<Pipeline, Dispatch>( result, pipeline, VULKAN_HPP_NAMESPACE_STRING "::Device::createRayTracingPipelineNVUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT }, deleter );
  66465. }
  66466. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  66467. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
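// The createRayTracingPipelinesNV wrappers above mirror the KHR versions but take no
// deferred-operation handle and only treat eSuccess and ePipelineCompileRequiredEXT as
// success codes, matching vkCreateRayTracingPipelinesNV.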
  66468. template <typename Dispatch>
  66469. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66470. {
  66471. return static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkRenderPass *>( pRenderPass ) ) );
  66472. }
  66473. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66474. template <typename Dispatch>
  66475. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66476. {
  66477. VULKAN_HPP_NAMESPACE::RenderPass renderPass;
  66478. Result result = static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
  66479. return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" );
  66480. }
  66481. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  66482. template <typename Dispatch>
  66483. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type Device::createRenderPassUnique( const RenderPassCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66484. {
  66485. VULKAN_HPP_NAMESPACE::RenderPass renderPass;
  66486. Result result = static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
  66487. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  66488. return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique", deleter );
  66489. }
  66490. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  66491. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
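// Illustrative use of the enhanced-mode wrapper above (a sketch; `device` and a populated
// vk::RenderPassCreateInfo `renderPassInfo` are assumptions made for the example):
//   vk::UniqueRenderPass renderPass = device.createRenderPassUnique( renderPassInfo );
//   // with exceptions enabled, failure throws, so no explicit result check is needed.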
  66492. template <typename Dispatch>
  66493. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66494. {
  66495. return static_cast<Result>( d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkRenderPass *>( pRenderPass ) ) );
  66496. }
  66497. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66498. template <typename Dispatch>
  66499. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass2( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66500. {
  66501. VULKAN_HPP_NAMESPACE::RenderPass renderPass;
  66502. Result result = static_cast<Result>( d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
  66503. return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" );
  66504. }
  66505. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  66506. template <typename Dispatch>
  66507. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type Device::createRenderPass2Unique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66508. {
  66509. VULKAN_HPP_NAMESPACE::RenderPass renderPass;
  66510. Result result = static_cast<Result>( d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
  66511. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  66512. return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique", deleter );
  66513. }
  66514. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  66515. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
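// createRenderPass2 wraps the Vulkan 1.2 core entry point; the createRenderPass2KHR
// wrappers that follow are the VK_KHR_create_renderpass2 spelling, dispatch to
// vkCreateRenderPass2KHR, and are otherwise identical in behaviour.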
  66516. template <typename Dispatch>
  66517. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::RenderPass* pRenderPass, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66518. {
  66519. return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkRenderPass *>( pRenderPass ) ) );
  66520. }
  66521. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66522. template <typename Dispatch>
  66523. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass2KHR( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66524. {
  66525. VULKAN_HPP_NAMESPACE::RenderPass renderPass;
  66526. Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
  66527. return createResultValue( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" );
  66528. }
  66529. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  66530. template <typename Dispatch>
  66531. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type Device::createRenderPass2KHRUnique( const RenderPassCreateInfo2 & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66532. {
  66533. VULKAN_HPP_NAMESPACE::RenderPass renderPass;
  66534. Result result = static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) ) );
  66535. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  66536. return createResultValue<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( result, renderPass, VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique", deleter );
  66537. }
  66538. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  66539. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  66540. template <typename Dispatch>
  66541. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Sampler* pSampler, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66542. {
  66543. return static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSampler *>( pSampler ) ) );
  66544. }
  66545. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66546. template <typename Dispatch>
  66547. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type Device::createSampler( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66548. {
  66549. VULKAN_HPP_NAMESPACE::Sampler sampler;
  66550. Result result = static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSampler *>( &sampler ) ) );
  66551. return createResultValue( result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" );
  66552. }
  66553. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  66554. template <typename Dispatch>
  66555. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type Device::createSamplerUnique( const SamplerCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66556. {
  66557. VULKAN_HPP_NAMESPACE::Sampler sampler;
  66558. Result result = static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSampler *>( &sampler ) ) );
  66559. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  66560. return createResultValue<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>( result, sampler, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique", deleter );
  66561. }
  66562. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  66563. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
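// Illustrative use (a sketch; `device` and a filled-in vk::SamplerCreateInfo `samplerInfo`
// are assumed user-side objects):
//   vk::UniqueSampler sampler = device.createSamplerUnique( samplerInfo );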
  66564. template <typename Dispatch>
  66565. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66566. {
  66567. return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
  66568. }
  66569. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66570. template <typename Dispatch>
  66571. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversion( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66572. {
  66573. VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
  66574. Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
  66575. return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" );
  66576. }
  66577. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  66578. template <typename Dispatch>
  66579. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type Device::createSamplerYcbcrConversionUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66580. {
  66581. VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
  66582. Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
  66583. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  66584. return createResultValue<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique", deleter );
  66585. }
  66586. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  66587. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  66588. template <typename Dispatch>
  66589. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversionKHR( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion* pYcbcrConversion, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66590. {
  66591. return static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
  66592. }
  66593. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66594. template <typename Dispatch>
  66595. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversionKHR( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66596. {
  66597. VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
  66598. Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
  66599. return createResultValue( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHR" );
  66600. }
  66601. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  66602. template <typename Dispatch>
  66603. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type Device::createSamplerYcbcrConversionKHRUnique( const SamplerYcbcrConversionCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66604. {
  66605. VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
  66606. Result result = static_cast<Result>( d.vkCreateSamplerYcbcrConversionKHR( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ) );
  66607. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  66608. return createResultValue<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( result, ycbcrConversion, VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionKHRUnique", deleter );
  66609. }
  66610. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  66611. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
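// The sampler Y'CbCr conversion wrappers come in two flavours: the core Vulkan 1.1
// createSamplerYcbcrConversion above and the VK_KHR_sampler_ycbcr_conversion variant,
// which forwards to vkCreateSamplerYcbcrConversionKHR but shares the same create-info type.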
  66612. template <typename Dispatch>
  66613. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Semaphore* pSemaphore, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66614. {
  66615. return static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSemaphore *>( pSemaphore ) ) );
  66616. }
  66617. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66618. template <typename Dispatch>
  66619. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type Device::createSemaphore( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66620. {
  66621. VULKAN_HPP_NAMESPACE::Semaphore semaphore;
  66622. Result result = static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSemaphore *>( &semaphore ) ) );
  66623. return createResultValue( result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" );
  66624. }
  66625. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  66626. template <typename Dispatch>
  66627. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type Device::createSemaphoreUnique( const SemaphoreCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66628. {
  66629. VULKAN_HPP_NAMESPACE::Semaphore semaphore;
  66630. Result result = static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSemaphore *>( &semaphore ) ) );
  66631. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  66632. return createResultValue<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>( result, semaphore, VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique", deleter );
  66633. }
  66634. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  66635. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
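// Illustrative use (a sketch; `device` is an assumed vk::Device):
//   vk::UniqueSemaphore imageAvailable = device.createSemaphoreUnique( vk::SemaphoreCreateInfo{} );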
  66636. template <typename Dispatch>
  66637. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule* pShaderModule, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66638. {
  66639. return static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkShaderModule *>( pShaderModule ) ) );
  66640. }
  66641. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66642. template <typename Dispatch>
  66643. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type Device::createShaderModule( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66644. {
  66645. VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
  66646. Result result = static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkShaderModule *>( &shaderModule ) ) );
  66647. return createResultValue( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" );
  66648. }
  66649. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  66650. template <typename Dispatch>
  66651. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type Device::createShaderModuleUnique( const ShaderModuleCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66652. {
  66653. VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
  66654. Result result = static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkShaderModule *>( &shaderModule ) ) );
  66655. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  66656. return createResultValue<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>( result, shaderModule, VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique", deleter );
  66657. }
  66658. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  66659. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
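// Illustrative use (a sketch; `device` and `spirv`, a std::vector<uint32_t> holding valid
// SPIR-V, are user-side names, not part of this header):
//   vk::ShaderModuleCreateInfo moduleInfo{};
//   moduleInfo.codeSize = spirv.size() * sizeof( uint32_t );
//   moduleInfo.pCode    = spirv.data();
//   vk::UniqueShaderModule shaderModule = device.createShaderModuleUnique( moduleInfo );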
  66660. template <typename Dispatch>
  66661. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66662. {
  66663. return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSwapchainKHR *>( pSwapchains ) ) );
  66664. }
  66665. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66666. template <typename SwapchainKHRAllocator, typename Dispatch>
  66667. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66668. {
  66669. std::vector<SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size() );
  66670. Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
  66671. return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
  66672. }
  66673. template <typename SwapchainKHRAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SwapchainKHR>::value, int>::type >
  66674. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<SwapchainKHR, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d ) const
  66675. {
  66676. std::vector<SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size(), swapchainKHRAllocator );
  66677. Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
  66678. return createResultValue( result, swapchains, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" );
  66679. }
  66680. template <typename Dispatch>
  66681. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<SwapchainKHR>::type Device::createSharedSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66682. {
  66683. SwapchainKHR swapchain;
  66684. Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 1, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
  66685. return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" );
  66686. }
  66687. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  66688. template <typename Dispatch, typename SwapchainKHRAllocator>
  66689. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66690. {
  66691. std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains;
  66692. std::vector<SwapchainKHR> swapchains( createInfos.size() );
  66693. Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
  66694. if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
  66695. {
  66696. uniqueSwapchains.reserve( createInfos.size() );
  66697. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
for ( size_t i = 0; i < createInfos.size(); i++ )
  66699. {
  66700. uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchains[i], deleter ) );
  66701. }
  66702. }
  66703. return createResultValue( result, std::move( uniqueSwapchains ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
  66704. }
  66705. template <typename Dispatch, typename SwapchainKHRAllocator, typename B, typename std::enable_if<std::is_same<typename B::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, int>::type >
  66706. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHRUnique( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d ) const
  66707. {
  66708. std::vector<UniqueHandle<SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator );
  66709. std::vector<SwapchainKHR> swapchains( createInfos.size() );
  66710. Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ) );
  66711. if ( result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
  66712. {
  66713. uniqueSwapchains.reserve( createInfos.size() );
  66714. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
for ( size_t i = 0; i < createInfos.size(); i++ )
  66716. {
  66717. uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchains[i], deleter ) );
  66718. }
  66719. }
  66720. return createResultValue( result, std::move( uniqueSwapchains ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" );
  66721. }
  66722. template <typename Dispatch>
  66723. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<SwapchainKHR, Dispatch>>::type Device::createSharedSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66724. {
  66725. SwapchainKHR swapchain;
  66726. Result result = static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, 1, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
  66727. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  66728. return createResultValue<SwapchainKHR, Dispatch>( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique", deleter );
  66729. }
  66730. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  66731. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
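// createSharedSwapchainsKHR wraps vkCreateSharedSwapchainsKHR from VK_KHR_display_swapchain.
// Unlike the pipeline wrappers it has eSuccess as its only success code, so the enhanced-mode
// overloads return ResultValueType (throwing on failure when exceptions are enabled) instead
// of a ResultValue that must be inspected.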
  66732. template <typename Dispatch>
  66733. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66734. {
  66735. return static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSwapchainKHR *>( pSwapchain ) ) );
  66736. }
  66737. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66738. template <typename Dispatch>
  66739. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type Device::createSwapchainKHR( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66740. {
  66741. VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
  66742. Result result = static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
  66743. return createResultValue( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" );
  66744. }
  66745. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  66746. template <typename Dispatch>
  66747. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type Device::createSwapchainKHRUnique( const SwapchainCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66748. {
  66749. VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain;
  66750. Result result = static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ) );
  66751. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  66752. return createResultValue<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( result, swapchain, VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique", deleter );
  66753. }
  66754. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  66755. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
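// Illustrative use (a sketch; `device` and a populated vk::SwapchainCreateInfoKHR
// `swapchainInfo` are assumptions for the example):
//   vk::UniqueSwapchainKHR swapchain = device.createSwapchainKHRUnique( swapchainInfo );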
  66756. template <typename Dispatch>
  66757. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createValidationCacheEXT( const VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pValidationCache, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66758. {
  66759. return static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkValidationCacheEXT *>( pValidationCache ) ) );
  66760. }
  66761. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66762. template <typename Dispatch>
  66763. VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::type Device::createValidationCacheEXT( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66764. {
  66765. VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
  66766. Result result = static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) );
  66767. return createResultValue( result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXT" );
  66768. }
  66769. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  66770. template <typename Dispatch>
  66771. VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>>::type Device::createValidationCacheEXTUnique( const ValidationCacheCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  66772. {
  66773. VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache;
  66774. Result result = static_cast<Result>( d.vkCreateValidationCacheEXT( m_device, reinterpret_cast<const VkValidationCacheCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkValidationCacheEXT *>( &validationCache ) ) );
  66775. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  66776. return createResultValue<VULKAN_HPP_NAMESPACE::ValidationCacheEXT, Dispatch>( result, validationCache, VULKAN_HPP_NAMESPACE_STRING "::Device::createValidationCacheEXTUnique", deleter );
  66777. }
  66778. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  66779. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
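// Note: unlike most create wrappers in this section, the enhanced-mode
// createValidationCacheEXT overloads above are not marked
// VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS, so silently discarding their return value
// will not trigger a warning.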
  66780. template <typename Dispatch>
  66781. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT* pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66782. {
  66783. return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) );
  66784. }
  66785. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66786. template <typename Dispatch>
  66787. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectNameEXT( const DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
  66788. {
  66789. Result result = static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) ) );
  66790. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" );
  66791. }
  66792. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  66793. template <typename Dispatch>
  66794. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT* pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66795. {
  66796. return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) );
  66797. }
  66798. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66799. template <typename Dispatch>
  66800. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectTagEXT( const DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
  66801. {
  66802. Result result = static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) ) );
  66803. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" );
  66804. }
  66805. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
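// debugMarkerSetObjectNameEXT / debugMarkerSetObjectTagEXT wrap VK_EXT_debug_marker; in
// enhanced mode they return ResultValueType<void>::type, i.e. plain void when exceptions
// are enabled, with failures reported by throwing.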
  66806. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66807. template <typename Dispatch>
  66808. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66809. {
  66810. return static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
  66811. }
  66812. #else
  66813. template <typename Dispatch>
  66814. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::deferredOperationJoinKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const
  66815. {
  66816. Result result = static_cast<Result>( d.vkDeferredOperationJoinKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
  66817. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::deferredOperationJoinKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eThreadDoneKHR, VULKAN_HPP_NAMESPACE::Result::eThreadIdleKHR } );
  66818. }
  66819. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
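// deferredOperationJoinKHR treats eThreadDoneKHR and eThreadIdleKHR as success codes in
// addition to eSuccess, so the enhanced-mode overload returns the raw Result for the
// caller to inspect instead of throwing on those values.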
  66820. template <typename Dispatch>
  66821. VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66822. {
  66823. d.vkDestroyAccelerationStructureKHR( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  66824. }
  66825. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66826. template <typename Dispatch>
  66827. VULKAN_HPP_INLINE void Device::destroyAccelerationStructureKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66828. {
  66829. d.vkDestroyAccelerationStructureKHR( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  66830. }
  66831. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  66832. template <typename Dispatch>
  66833. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66834. {
  66835. d.vkDestroyAccelerationStructureKHR( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  66836. }
  66837. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66838. template <typename Dispatch>
  66839. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66840. {
  66841. d.vkDestroyAccelerationStructureKHR( m_device, static_cast<VkAccelerationStructureKHR>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  66842. }
  66843. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
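// Each handle type in the destroy section gets two equivalent sets of wrappers: a
// type-named form (e.g. destroyAccelerationStructureKHR) and an overloaded Device::destroy
// taking the handle directly; both forward to the same vkDestroy* entry point.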
  66844. template <typename Dispatch>
  66845. VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66846. {
  66847. d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  66848. }
  66849. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66850. template <typename Dispatch>
  66851. VULKAN_HPP_INLINE void Device::destroyAccelerationStructureNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66852. {
  66853. d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  66854. }
  66855. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  66856. template <typename Dispatch>
  66857. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66858. {
  66859. d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  66860. }
  66861. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66862. template <typename Dispatch>
  66863. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66864. {
  66865. d.vkDestroyAccelerationStructureNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  66866. }
  66867. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  66868. template <typename Dispatch>
  66869. VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66870. {
  66871. d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  66872. }
  66873. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66874. template <typename Dispatch>
  66875. VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66876. {
  66877. d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  66878. }
  66879. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  66880. template <typename Dispatch>
  66881. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66882. {
  66883. d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  66884. }
  66885. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66886. template <typename Dispatch>
  66887. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66888. {
  66889. d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  66890. }
  66891. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  66892. template <typename Dispatch>
  66893. VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66894. {
  66895. d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  66896. }
  66897. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66898. template <typename Dispatch>
  66899. VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66900. {
  66901. d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  66902. }
  66903. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  66904. template <typename Dispatch>
  66905. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66906. {
  66907. d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  66908. }
  66909. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66910. template <typename Dispatch>
  66911. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66912. {
  66913. d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  66914. }
  66915. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  66916. template <typename Dispatch>
  66917. VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66918. {
  66919. d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  66920. }
  66921. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66922. template <typename Dispatch>
  66923. VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66924. {
  66925. d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  66926. }
  66927. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  66928. template <typename Dispatch>
  66929. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66930. {
  66931. d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  66932. }
  66933. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66934. template <typename Dispatch>
  66935. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66936. {
  66937. d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  66938. }
  66939. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  66940. template <typename Dispatch>
  66941. VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66942. {
  66943. d.vkDestroyDeferredOperationKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  66944. }
  66945. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66946. template <typename Dispatch>
  66947. VULKAN_HPP_INLINE void Device::destroyDeferredOperationKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66948. {
  66949. d.vkDestroyDeferredOperationKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  66950. }
  66951. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  66952. template <typename Dispatch>
  66953. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66954. {
  66955. d.vkDestroyDeferredOperationKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  66956. }
  66957. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66958. template <typename Dispatch>
  66959. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66960. {
  66961. d.vkDestroyDeferredOperationKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  66962. }
  66963. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  66964. template <typename Dispatch>
  66965. VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66966. {
  66967. d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  66968. }
  66969. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66970. template <typename Dispatch>
  66971. VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66972. {
  66973. d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  66974. }
  66975. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  66976. template <typename Dispatch>
  66977. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66978. {
  66979. d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  66980. }
  66981. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66982. template <typename Dispatch>
  66983. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66984. {
  66985. d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  66986. }
  66987. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  66988. template <typename Dispatch>
  66989. VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66990. {
  66991. d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  66992. }
  66993. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  66994. template <typename Dispatch>
  66995. VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  66996. {
  66997. d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  66998. }
  66999. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67000. template <typename Dispatch>
  67001. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67002. {
  67003. d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67004. }
  67005. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67006. template <typename Dispatch>
  67007. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67008. {
  67009. d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67010. }
  67011. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67012. template <typename Dispatch>
  67013. VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67014. {
  67015. d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67016. }
  67017. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67018. template <typename Dispatch>
  67019. VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67020. {
  67021. d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67022. }
  67023. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67024. template <typename Dispatch>
  67025. VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67026. {
  67027. d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67028. }
  67029. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67030. template <typename Dispatch>
  67031. VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67032. {
  67033. d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67034. }
  67035. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67036. template <typename Dispatch>
  67037. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67038. {
  67039. d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67040. }
  67041. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67042. template <typename Dispatch>
  67043. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67044. {
  67045. d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67046. }
  67047. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
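// Note (illustrative comment): destroyDescriptorUpdateTemplate and the generic destroy
// overloads call the core vkDestroyDescriptorUpdateTemplate entry point, while
// destroyDescriptorUpdateTemplateKHR calls the vkDestroyDescriptorUpdateTemplateKHR
// alias from VK_KHR_descriptor_update_template. Both accept the same handle type; the
// choice only affects which function pointer the dispatcher must have loaded.
//
//   device.destroyDescriptorUpdateTemplate( tmpl );      // Vulkan 1.1 core entry point
//   device.destroyDescriptorUpdateTemplateKHR( tmpl );   // extension entry point
//
// ( tmpl is a hypothetical vk::DescriptorUpdateTemplate handle. )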
  67048. template <typename Dispatch>
  67049. VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67050. {
  67051. d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67052. }
  67053. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67054. template <typename Dispatch>
  67055. VULKAN_HPP_INLINE void Device::destroy( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67056. {
  67057. d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67058. }
  67059. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
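// Usage sketch (illustrative only): the allocator-only destroy overloads above tear down
// the device itself via vkDestroyDevice. Per the Vulkan valid-usage rules, all child
// objects created from the device should already have been destroyed at this point. The
// call below assumes the default "vk" namespace and the defaulted arguments from the
// corresponding declarations.
//
//   device.destroy();   // calls vkDestroyDevice with no allocation callbacks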
  67060. template <typename Dispatch>
  67061. VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67062. {
  67063. d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67064. }
  67065. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67066. template <typename Dispatch>
  67067. VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67068. {
  67069. d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67070. }
  67071. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67072. template <typename Dispatch>
  67073. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67074. {
  67075. d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67076. }
  67077. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67078. template <typename Dispatch>
  67079. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67080. {
  67081. d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67082. }
  67083. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67084. template <typename Dispatch>
  67085. VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67086. {
  67087. d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67088. }
  67089. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67090. template <typename Dispatch>
  67091. VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67092. {
  67093. d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67094. }
  67095. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67096. template <typename Dispatch>
  67097. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67098. {
  67099. d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67100. }
  67101. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67102. template <typename Dispatch>
  67103. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67104. {
  67105. d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67106. }
  67107. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67108. template <typename Dispatch>
  67109. VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67110. {
  67111. d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67112. }
  67113. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67114. template <typename Dispatch>
  67115. VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67116. {
  67117. d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67118. }
  67119. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67120. template <typename Dispatch>
  67121. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67122. {
  67123. d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67124. }
  67125. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67126. template <typename Dispatch>
  67127. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67128. {
  67129. d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67130. }
  67131. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67132. template <typename Dispatch>
  67133. VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67134. {
  67135. d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67136. }
  67137. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67138. template <typename Dispatch>
  67139. VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67140. {
  67141. d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67142. }
  67143. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67144. template <typename Dispatch>
  67145. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67146. {
  67147. d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67148. }
  67149. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67150. template <typename Dispatch>
  67151. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67152. {
  67153. d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67154. }
  67155. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67156. template <typename Dispatch>
  67157. VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67158. {
  67159. d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67160. }
  67161. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67162. template <typename Dispatch>
  67163. VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67164. {
  67165. d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67166. }
  67167. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67168. template <typename Dispatch>
  67169. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67170. {
  67171. d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67172. }
  67173. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67174. template <typename Dispatch>
  67175. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67176. {
  67177. d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67178. }
  67179. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67180. template <typename Dispatch>
  67181. VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67182. {
  67183. d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67184. }
  67185. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67186. template <typename Dispatch>
  67187. VULKAN_HPP_INLINE void Device::destroyIndirectCommandsLayoutNV( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67188. {
  67189. d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67190. }
  67191. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67192. template <typename Dispatch>
  67193. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67194. {
  67195. d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67196. }
  67197. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67198. template <typename Dispatch>
  67199. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV indirectCommandsLayout, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67200. {
  67201. d.vkDestroyIndirectCommandsLayoutNV( m_device, static_cast<VkIndirectCommandsLayoutNV>( indirectCommandsLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67202. }
  67203. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67204. template <typename Dispatch>
  67205. VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67206. {
  67207. d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67208. }
  67209. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67210. template <typename Dispatch>
  67211. VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67212. {
  67213. d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67214. }
  67215. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67216. template <typename Dispatch>
  67217. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67218. {
  67219. d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67220. }
  67221. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67222. template <typename Dispatch>
  67223. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67224. {
  67225. d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67226. }
  67227. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67228. template <typename Dispatch>
  67229. VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67230. {
  67231. d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67232. }
  67233. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67234. template <typename Dispatch>
  67235. VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67236. {
  67237. d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67238. }
  67239. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67240. template <typename Dispatch>
  67241. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67242. {
  67243. d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67244. }
  67245. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67246. template <typename Dispatch>
  67247. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67248. {
  67249. d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67250. }
  67251. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67252. template <typename Dispatch>
  67253. VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67254. {
  67255. d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67256. }
  67257. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67258. template <typename Dispatch>
  67259. VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67260. {
  67261. d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67262. }
  67263. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67264. template <typename Dispatch>
  67265. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67266. {
  67267. d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67268. }
  67269. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67270. template <typename Dispatch>
  67271. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67272. {
  67273. d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67274. }
  67275. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67276. template <typename Dispatch>
  67277. VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67278. {
  67279. d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67280. }
  67281. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67282. template <typename Dispatch>
  67283. VULKAN_HPP_INLINE void Device::destroyPrivateDataSlotEXT( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67284. {
  67285. d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67286. }
  67287. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67288. template <typename Dispatch>
  67289. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67290. {
  67291. d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67292. }
  67293. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67294. template <typename Dispatch>
  67295. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67296. {
  67297. d.vkDestroyPrivateDataSlotEXT( m_device, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67298. }
  67299. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67300. template <typename Dispatch>
  67301. VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67302. {
  67303. d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67304. }
  67305. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67306. template <typename Dispatch>
  67307. VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67308. {
  67309. d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67310. }
  67311. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67312. template <typename Dispatch>
  67313. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67314. {
  67315. d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67316. }
  67317. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67318. template <typename Dispatch>
  67319. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67320. {
  67321. d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67322. }
  67323. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67324. template <typename Dispatch>
  67325. VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67326. {
  67327. d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67328. }
  67329. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67330. template <typename Dispatch>
  67331. VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67332. {
  67333. d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67334. }
  67335. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67336. template <typename Dispatch>
  67337. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67338. {
  67339. d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67340. }
  67341. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67342. template <typename Dispatch>
  67343. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67344. {
  67345. d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67346. }
  67347. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67348. template <typename Dispatch>
  67349. VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67350. {
  67351. d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67352. }
  67353. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67354. template <typename Dispatch>
  67355. VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67356. {
  67357. d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67358. }
  67359. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67360. template <typename Dispatch>
  67361. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67362. {
  67363. d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67364. }
  67365. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67366. template <typename Dispatch>
  67367. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67368. {
  67369. d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67370. }
  67371. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67372. template <typename Dispatch>
  67373. VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67374. {
  67375. d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67376. }
  67377. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67378. template <typename Dispatch>
  67379. VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67380. {
  67381. d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67382. }
  67383. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67384. template <typename Dispatch>
  67385. VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67386. {
  67387. d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67388. }
  67389. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67390. template <typename Dispatch>
  67391. VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversionKHR( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67392. {
  67393. d.vkDestroySamplerYcbcrConversionKHR( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67394. }
  67395. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67396. template <typename Dispatch>
  67397. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67398. {
  67399. d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67400. }
  67401. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67402. template <typename Dispatch>
  67403. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67404. {
  67405. d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67406. }
  67407. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67408. template <typename Dispatch>
  67409. VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67410. {
  67411. d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67412. }
  67413. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67414. template <typename Dispatch>
  67415. VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67416. {
  67417. d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67418. }
  67419. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67420. template <typename Dispatch>
  67421. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67422. {
  67423. d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67424. }
  67425. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67426. template <typename Dispatch>
  67427. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67428. {
  67429. d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67430. }
  67431. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67432. template <typename Dispatch>
  67433. VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67434. {
  67435. d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67436. }
  67437. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67438. template <typename Dispatch>
  67439. VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67440. {
  67441. d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67442. }
  67443. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67444. template <typename Dispatch>
  67445. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67446. {
  67447. d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67448. }
  67449. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67450. template <typename Dispatch>
  67451. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67452. {
  67453. d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67454. }
  67455. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67456. template <typename Dispatch>
  67457. VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67458. {
  67459. d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67460. }
  67461. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67462. template <typename Dispatch>
  67463. VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67464. {
  67465. d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67466. }
  67467. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67468. template <typename Dispatch>
  67469. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67470. {
  67471. d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67472. }
  67473. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67474. template <typename Dispatch>
  67475. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67476. {
  67477. d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67478. }
  67479. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67480. template <typename Dispatch>
  67481. VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67482. {
  67483. d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67484. }
  67485. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67486. template <typename Dispatch>
  67487. VULKAN_HPP_INLINE void Device::destroyValidationCacheEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67488. {
  67489. d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67490. }
  67491. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67492. template <typename Dispatch>
  67493. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67494. {
  67495. d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67496. }
  67497. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67498. template <typename Dispatch>
  67499. VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67500. {
  67501. d.vkDestroyValidationCacheEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67502. }
  67503. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67504. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67505. template <typename Dispatch>
  67506. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67507. {
  67508. return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
  67509. }
  67510. #else
  67511. template <typename Dispatch>
  67512. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle( Dispatch const & d ) const
  67513. {
  67514. Result result = static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
  67515. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
  67516. }
  67517. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
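// Usage sketch (illustrative only): with enhanced mode enabled (the default) and exceptions
// available, waitIdle translates the VkResult through createResultValue, returning void on
// success and throwing an exception derived from vk::SystemError otherwise; with
// VULKAN_HPP_DISABLE_ENHANCED_MODE defined, the raw Result is returned and must be checked
// by the caller.
//
//   device.waitIdle();                      // enhanced mode: throws on failure
//
//   // with VULKAN_HPP_DISABLE_ENHANCED_MODE:
//   // vk::Result result = device.waitIdle();
//   // if ( result != vk::Result::eSuccess ) { /* handle error */ }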
  67518. template <typename Dispatch>
  67519. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT* pDisplayPowerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67520. {
  67521. return static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) );
  67522. }
  67523. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67524. template <typename Dispatch>
  67525. VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayPowerInfoEXT & displayPowerInfo, Dispatch const & d ) const
  67526. {
  67527. Result result = static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) ) );
  67528. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" );
  67529. }
  67530. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
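// Usage sketch (illustrative only, assuming a display acquired through VK_EXT_display_control):
// the enhanced overload takes the power info by reference and throws on failure. The display
// handle below is a hypothetical placeholder.
//
//   vk::DisplayKHR display = /* acquired display */;
//   vk::DisplayPowerInfoEXT powerInfo( vk::DisplayPowerStateEXT::eOn );
//   device.displayPowerControlEXT( display, powerInfo );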
  67531. template <typename Dispatch>
  67532. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67533. {
  67534. return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
  67535. }
  67536. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67537. template <typename Dispatch>
  67538. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::flushMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const
  67539. {
  67540. Result result = static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
  67541. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
  67542. }
  67543. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
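// Usage sketch (illustrative only): the enhanced overload takes an ArrayProxy, which accepts
// a single element, an initializer list, or a contiguous container, and throws on failure.
// memory below is a hypothetical vk::DeviceMemory handle for a non-coherent allocation.
//
//   vk::MappedMemoryRange range( memory, 0, VK_WHOLE_SIZE );
//   device.flushMappedMemoryRanges( range );   // single range; a std::vector or { a, b } also works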
  67544. template <typename Dispatch>
  67545. VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67546. {
  67547. d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  67548. }
  67549. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67550. template <typename Dispatch>
  67551. VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67552. {
  67553. d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  67554. }
  67555. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67556. template <typename Dispatch>
  67557. VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer* pCommandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67558. {
  67559. d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
  67560. }
  67561. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67562. template <typename Dispatch>
  67563. VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::CommandPool commandPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67564. {
  67565. d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
  67566. }
  67567. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
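// Usage sketch (illustrative only): freeCommandBuffers and the generic free overloads are
// equivalent; the enhanced versions take an ArrayProxy, so a std::vector of command buffers
// can be passed directly. commandPool and allocInfo below are hypothetical.
//
//   std::vector<vk::CommandBuffer> buffers = device.allocateCommandBuffers( allocInfo );
//   device.freeCommandBuffers( commandPool, buffers );   // or equivalently: device.free( commandPool, buffers );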
  67568. template <typename Dispatch>
  67569. VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67570. {
  67571. return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
  67572. }
  67573. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67574. template <typename Dispatch>
  67575. VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d ) const
  67576. {
  67577. Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ) );
  67578. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::freeDescriptorSets" );
  67579. }
  67580. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67581. template <typename Dispatch>
  67582. VULKAN_HPP_INLINE Result Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet* pDescriptorSets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67583. {
  67584. return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
  67585. }
  67586. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67587. template <typename Dispatch>
  67588. VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::free( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d ) const
  67589. {
  67590. Result result = static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ) );
  67591. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::free" );
  67592. }
  67593. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67594. template <typename Dispatch>
  67595. VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67596. {
  67597. d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67598. }
  67599. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67600. template <typename Dispatch>
  67601. VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67602. {
  67603. d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67604. }
  67605. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67606. template <typename Dispatch>
  67607. VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67608. {
  67609. d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  67610. }
  67611. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67612. template <typename Dispatch>
  67613. VULKAN_HPP_INLINE void Device::free( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67614. {
  67615. d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  67616. }
  67617. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67618. template <typename Dispatch>
  67619. VULKAN_HPP_INLINE void Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR* pBuildInfo, const uint32_t* pMaxPrimitiveCounts, VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR* pSizeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67620. {
  67621. d.vkGetAccelerationStructureBuildSizesKHR( m_device, static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( pBuildInfo ), pMaxPrimitiveCounts, reinterpret_cast< VkAccelerationStructureBuildSizesInfoKHR *>( pSizeInfo ) );
  67622. }
  67623. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67624. template <typename Dispatch>
  67625. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR Device::getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR buildType, const AccelerationStructureBuildGeometryInfoKHR & buildInfo, ArrayProxy<const uint32_t> const & maxPrimitiveCounts, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  67626. {
  67627. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  67628. VULKAN_HPP_ASSERT( maxPrimitiveCounts.size() == buildInfo.geometryCount );
  67629. #else
  67630. if ( maxPrimitiveCounts.size() != buildInfo.geometryCount )
  67631. {
  67632. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureBuildSizesKHR: maxPrimitiveCounts.size() != buildInfo.geometryCount" );
  67633. }
  67634. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  67635. VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR sizeInfo;
  67636. d.vkGetAccelerationStructureBuildSizesKHR( m_device, static_cast<VkAccelerationStructureBuildTypeKHR>( buildType ), reinterpret_cast<const VkAccelerationStructureBuildGeometryInfoKHR *>( &buildInfo ), maxPrimitiveCounts.data(), reinterpret_cast<VkAccelerationStructureBuildSizesInfoKHR *>( &sizeInfo ) );
  67637. return sizeInfo;
  67638. }
  67639. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
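// --- Illustrative usage sketch; not part of the generated interface. The example
// function is hypothetical. It shows the enhanced overload of
// Device::getAccelerationStructureBuildSizesKHR; maxPrimitiveCounts.size() must equal
// buildInfo.geometryCount, otherwise the wrapper asserts or throws LogicError as above.
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
inline VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR exampleQueryBuildSizes( VULKAN_HPP_NAMESPACE::Device device, VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR const & buildInfo, std::vector<uint32_t> const & maxPrimitiveCounts )
{
  return device.getAccelerationStructureBuildSizesKHR( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR::eDevice, buildInfo, maxPrimitiveCounts );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/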
  67640. template <typename Dispatch>
  67641. VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67642. {
  67643. return static_cast<DeviceAddress>( d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( pInfo ) ) );
  67644. }
  67645. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67646. template <typename Dispatch>
  67647. VULKAN_HPP_INLINE DeviceAddress Device::getAccelerationStructureAddressKHR( const AccelerationStructureDeviceAddressInfoKHR & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67648. {
  67649. return d.vkGetAccelerationStructureDeviceAddressKHR( m_device, reinterpret_cast<const VkAccelerationStructureDeviceAddressInfoKHR *>( &info ) );
  67650. }
  67651. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67652. template <typename Dispatch>
  67653. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, void* pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67654. {
  67655. return static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), dataSize, pData ) );
  67656. }
  67657. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67658. template <typename T, typename Dispatch>
  67659. VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
  67660. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, ArrayProxy<T> const &data, Dispatch const &d ) const
  67661. {
  67662. Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
  67663. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getAccelerationStructureHandleNV" );
  67664. }
  67665. template <typename T, typename Allocator, typename Dispatch>
  67666. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T,Allocator>>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, size_t dataSize, Dispatch const & d ) const
  67667. {
  67668. VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
  67669. std::vector<T,Allocator> data( dataSize / sizeof( T ) );
  67670. Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), data.size() * sizeof( T ), reinterpret_cast<void *>( data.data() ) ) );
  67671. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
  67672. }
  67673. template <typename T, typename Dispatch>
  67674. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type Device::getAccelerationStructureHandleNV( VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure, Dispatch const & d ) const
  67675. {
  67676. T data;
  67677. Result result = static_cast<Result>( d.vkGetAccelerationStructureHandleNV( m_device, static_cast<VkAccelerationStructureNV>( accelerationStructure ), sizeof( T ), reinterpret_cast<void *>( &data ) ) );
  67678. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getAccelerationStructureHandleNV" );
  67679. }
  67680. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67681. template <typename Dispatch>
  67682. VULKAN_HPP_INLINE void Device::getAccelerationStructureMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR* pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67683. {
  67684. d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( pInfo ), reinterpret_cast< VkMemoryRequirements2KHR *>( pMemoryRequirements ) );
  67685. }
  67686. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67687. template <typename Dispatch>
  67688. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67689. {
  67690. VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR memoryRequirements;
  67691. d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ), reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
  67692. return memoryRequirements;
  67693. }
  67694. template <typename X, typename Y, typename... Z, typename Dispatch>
  67695. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getAccelerationStructureMemoryRequirementsNV( const AccelerationStructureMemoryRequirementsInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67696. {
  67697. StructureChain<X, Y, Z...> structureChain;
  67698. VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR>();
  67699. d.vkGetAccelerationStructureMemoryRequirementsNV( m_device, reinterpret_cast<const VkAccelerationStructureMemoryRequirementsInfoNV *>( &info ), reinterpret_cast<VkMemoryRequirements2KHR *>( &memoryRequirements ) );
  67700. return structureChain;
  67701. }
  67702. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
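// --- Illustrative usage sketch; not part of the generated interface. The example
// function is hypothetical. It queries the build-scratch size of an NV acceleration
// structure through the value-returning enhanced overload.
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
inline VULKAN_HPP_NAMESPACE::DeviceSize exampleAccelerationStructureScratchSizeNV( VULKAN_HPP_NAMESPACE::Device device, VULKAN_HPP_NAMESPACE::AccelerationStructureNV accelerationStructure )
{
  VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV info;
  info.type = VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV::eBuildScratch;
  info.accelerationStructure = accelerationStructure;
  return device.getAccelerationStructureMemoryRequirementsNV( info ).memoryRequirements.size;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/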
  67703. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  67704. template <typename Dispatch>
  67705. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer* buffer, VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID* pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67706. {
  67707. return static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, buffer, reinterpret_cast< VkAndroidHardwareBufferPropertiesANDROID *>( pProperties ) ) );
  67708. }
  67709. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67710. template <typename Dispatch>
  67711. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
  67712. {
  67713. VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID properties;
  67714. Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
  67715. return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getAndroidHardwareBufferPropertiesANDROID" );
  67716. }
  67717. template <typename X, typename Y, typename... Z, typename Dispatch>
  67718. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type Device::getAndroidHardwareBufferPropertiesANDROID( const struct AHardwareBuffer & buffer, Dispatch const & d ) const
  67719. {
  67720. StructureChain<X, Y, Z...> structureChain;
  67721. VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>();
  67722. Result result = static_cast<Result>( d.vkGetAndroidHardwareBufferPropertiesANDROID( m_device, &buffer, reinterpret_cast<VkAndroidHardwareBufferPropertiesANDROID *>( &properties ) ) );
  67723. return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::Device::getAndroidHardwareBufferPropertiesANDROID" );
  67724. }
  67725. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67726. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  67727. template <typename Dispatch>
  67728. VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67729. {
  67730. return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
  67731. }
  67732. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67733. template <typename Dispatch>
  67734. VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67735. {
  67736. return d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  67737. }
  67738. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
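// --- Illustrative usage sketch; not part of the generated interface. The example
// function is hypothetical. It shows the enhanced overload of Device::getBufferAddress;
// the buffer is assumed to have been created with BufferUsageFlagBits::eShaderDeviceAddress.
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
inline VULKAN_HPP_NAMESPACE::DeviceAddress exampleGetBufferDeviceAddress( VULKAN_HPP_NAMESPACE::Device device, VULKAN_HPP_NAMESPACE::Buffer buffer )
{
  VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo info;
  info.buffer = buffer;
  return device.getBufferAddress( info );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/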
  67739. template <typename Dispatch>
  67740. VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67741. {
  67742. return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
  67743. }
  67744. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67745. template <typename Dispatch>
  67746. VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressEXT( const BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67747. {
  67748. return d.vkGetBufferDeviceAddressEXT( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  67749. }
  67750. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67751. template <typename Dispatch>
  67752. VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67753. {
  67754. return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
  67755. }
  67756. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67757. template <typename Dispatch>
  67758. VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67759. {
  67760. return d.vkGetBufferDeviceAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  67761. }
  67762. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67763. template <typename Dispatch>
  67764. VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67765. {
  67766. d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast< VkMemoryRequirements *>( pMemoryRequirements ) );
  67767. }
  67768. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67769. template <typename Dispatch>
  67770. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67771. {
  67772. VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
  67773. d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
  67774. return memoryRequirements;
  67775. }
  67776. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67777. template <typename Dispatch>
  67778. VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67779. {
  67780. d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast< VkMemoryRequirements2 *>( pMemoryRequirements ) );
  67781. }
  67782. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67783. template <typename Dispatch>
  67784. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67785. {
  67786. VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  67787. d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  67788. return memoryRequirements;
  67789. }
  67790. template <typename X, typename Y, typename... Z, typename Dispatch>
  67791. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2( const BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67792. {
  67793. StructureChain<X, Y, Z...> structureChain;
  67794. VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  67795. d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  67796. return structureChain;
  67797. }
  67798. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
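// --- Illustrative usage sketch; not part of the generated interface. The example
// function is hypothetical. It shows the StructureChain overload of
// Device::getBufferMemoryRequirements2, chaining MemoryDedicatedRequirements onto
// MemoryRequirements2 to ask whether a dedicated allocation is preferred.
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
inline VULKAN_HPP_NAMESPACE::Bool32 exampleBufferPrefersDedicatedAllocation( VULKAN_HPP_NAMESPACE::Device device, VULKAN_HPP_NAMESPACE::Buffer buffer )
{
  VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 info;
  info.buffer = buffer;
  auto chain = device.getBufferMemoryRequirements2<VULKAN_HPP_NAMESPACE::MemoryRequirements2, VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>( info );
  return chain.get<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>().prefersDedicatedAllocation;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/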
  67799. template <typename Dispatch>
  67800. VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67801. {
  67802. d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast< VkMemoryRequirements2 *>( pMemoryRequirements ) );
  67803. }
  67804. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67805. template <typename Dispatch>
  67806. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67807. {
  67808. VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  67809. d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  67810. return memoryRequirements;
  67811. }
  67812. template <typename X, typename Y, typename... Z, typename Dispatch>
  67813. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2KHR( const BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67814. {
  67815. StructureChain<X, Y, Z...> structureChain;
  67816. VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  67817. d.vkGetBufferMemoryRequirements2KHR( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  67818. return structureChain;
  67819. }
  67820. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67821. template <typename Dispatch>
  67822. VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67823. {
  67824. return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
  67825. }
  67826. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67827. template <typename Dispatch>
  67828. VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67829. {
  67830. return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  67831. }
  67832. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67833. template <typename Dispatch>
  67834. VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67835. {
  67836. return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
  67837. }
  67838. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67839. template <typename Dispatch>
  67840. VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddressKHR( const BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67841. {
  67842. return d.vkGetBufferOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
  67843. }
  67844. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67845. template <typename Dispatch>
  67846. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getCalibratedTimestampsEXT( uint32_t timestampCount, const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67847. {
  67848. return static_cast<Result>( d.vkGetCalibratedTimestampsEXT( m_device, timestampCount, reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( pTimestampInfos ), pTimestamps, pMaxDeviation ) );
  67849. }
  67850. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67851. template <typename Dispatch>
  67852. VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
  67853. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const &timestampInfos, ArrayProxy<uint64_t> const &timestamps, Dispatch const &d ) const
  67854. {
  67855. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  67856. VULKAN_HPP_ASSERT( timestampInfos.size() == timestamps.size() );
  67857. #else
  67858. if ( timestampInfos.size() != timestamps.size() )
  67859. {
67860. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT: timestampInfos.size() != timestamps.size()" );
  67861. }
  67862. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  67863. uint64_t maxDeviation;
  67864. Result result = static_cast<Result>( d.vkGetCalibratedTimestampsEXT( m_device, timestampInfos.size() , reinterpret_cast<const VkCalibratedTimestampInfoEXT*>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) );
  67865. return createResultValue( result, maxDeviation, VULKAN_HPP_NAMESPACE_STRING"::Device::getCalibratedTimestampsEXT" );
  67866. }
  67867. template <typename Uint64_tAllocator, typename Dispatch>
  67868. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type Device::getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos, Dispatch const & d ) const
  67869. {
  67870. std::pair<std::vector<uint64_t, Uint64_tAllocator>,uint64_t> data( std::piecewise_construct, std::forward_as_tuple( timestampInfos.size() ), std::forward_as_tuple( 0 ) );
  67871. std::vector<uint64_t, Uint64_tAllocator> & timestamps = data.first;
  67872. uint64_t & maxDeviation = data.second;
  67873. Result result = static_cast<Result>( d.vkGetCalibratedTimestampsEXT( m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) );
  67874. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
  67875. }
  67876. template <typename Uint64_tAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, uint64_t>::value, int>::type >
  67877. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<uint64_t, Uint64_tAllocator>, uint64_t>>::type Device::getCalibratedTimestampsEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT> const & timestampInfos, Uint64_tAllocator & uint64_tAllocator, Dispatch const & d ) const
  67878. {
  67879. std::pair<std::vector<uint64_t, Uint64_tAllocator>,uint64_t> data( std::piecewise_construct, std::forward_as_tuple( timestampInfos.size(), uint64_tAllocator ), std::forward_as_tuple( 0 ) );
  67880. std::vector<uint64_t, Uint64_tAllocator> & timestamps = data.first;
  67881. uint64_t & maxDeviation = data.second;
  67882. Result result = static_cast<Result>( d.vkGetCalibratedTimestampsEXT( m_device, timestampInfos.size(), reinterpret_cast<const VkCalibratedTimestampInfoEXT *>( timestampInfos.data() ), timestamps.data(), &maxDeviation ) );
  67883. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getCalibratedTimestampsEXT" );
  67884. }
  67885. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
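// --- Illustrative usage sketch; not part of the generated interface. The example
// function is hypothetical and assumes the default configuration with exceptions
// enabled. It uses the non-deprecated enhanced overload, which returns a pair of
// ( one timestamp per CalibratedTimestampInfoEXT passed in, maximum deviation ).
#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) && !defined( VULKAN_HPP_NO_EXCEPTIONS )
inline uint64_t exampleCalibratedDeviceTimestamp( VULKAN_HPP_NAMESPACE::Device device )
{
  VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT info;
  info.timeDomain = VULKAN_HPP_NAMESPACE::TimeDomainEXT::eDevice;
  auto data = device.getCalibratedTimestampsEXT( info );  // single info struct binds to ArrayProxy
  return data.first.front();
}
#endif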
  67886. template <typename Dispatch>
  67887. VULKAN_HPP_INLINE uint32_t Device::getDeferredOperationMaxConcurrencyKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67888. {
  67889. return d.vkGetDeferredOperationMaxConcurrencyKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) );
  67890. }
  67891. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67892. template <typename Dispatch>
  67893. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67894. {
  67895. return static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
  67896. }
  67897. #else
  67898. template <typename Dispatch>
  67899. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getDeferredOperationResultKHR( VULKAN_HPP_NAMESPACE::DeferredOperationKHR operation, Dispatch const & d ) const
  67900. {
  67901. Result result = static_cast<Result>( d.vkGetDeferredOperationResultKHR( m_device, static_cast<VkDeferredOperationKHR>( operation ) ) );
  67902. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getDeferredOperationResultKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
  67903. }
  67904. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67905. template <typename Dispatch>
  67906. VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67907. {
  67908. d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast< VkDescriptorSetLayoutSupport *>( pSupport ) );
  67909. }
  67910. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67911. template <typename Dispatch>
  67912. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67913. {
  67914. VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
  67915. d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
  67916. return support;
  67917. }
  67918. template <typename X, typename Y, typename... Z, typename Dispatch>
  67919. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupport( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67920. {
  67921. StructureChain<X, Y, Z...> structureChain;
  67922. VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
  67923. d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
  67924. return structureChain;
  67925. }
  67926. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
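// --- Illustrative usage sketch; not part of the generated interface. The example
// function is hypothetical. It shows the value-returning enhanced overload of
// Device::getDescriptorSetLayoutSupport.
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
inline VULKAN_HPP_NAMESPACE::Bool32 exampleIsDescriptorSetLayoutSupported( VULKAN_HPP_NAMESPACE::Device device, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo const & createInfo )
{
  return device.getDescriptorSetLayoutSupport( createInfo ).supported;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/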
  67927. template <typename Dispatch>
  67928. VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupportKHR( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo* pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport* pSupport, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67929. {
  67930. d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast< VkDescriptorSetLayoutSupport *>( pSupport ) );
  67931. }
  67932. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67933. template <typename Dispatch>
  67934. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67935. {
  67936. VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
  67937. d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
  67938. return support;
  67939. }
  67940. template <typename X, typename Y, typename... Z, typename Dispatch>
  67941. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupportKHR( const DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67942. {
  67943. StructureChain<X, Y, Z...> structureChain;
  67944. VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
  67945. d.vkGetDescriptorSetLayoutSupportKHR( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
  67946. return structureChain;
  67947. }
  67948. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67949. template <typename Dispatch>
  67950. VULKAN_HPP_INLINE void Device::getAccelerationStructureCompatibilityKHR( const VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR* pVersionInfo, VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR* pCompatibility, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67951. {
  67952. d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( pVersionInfo ), reinterpret_cast< VkAccelerationStructureCompatibilityKHR *>( pCompatibility ) );
  67953. }
  67954. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67955. template <typename Dispatch>
  67956. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR Device::getAccelerationStructureCompatibilityKHR( const AccelerationStructureVersionInfoKHR & versionInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67957. {
  67958. VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR compatibility;
  67959. d.vkGetDeviceAccelerationStructureCompatibilityKHR( m_device, reinterpret_cast<const VkAccelerationStructureVersionInfoKHR *>( &versionInfo ), reinterpret_cast<VkAccelerationStructureCompatibilityKHR *>( &compatibility ) );
  67960. return compatibility;
  67961. }
  67962. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
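// --- Illustrative usage sketch; not part of the generated interface. The example
// function is hypothetical; versionData is assumed to point at the 2 * VK_UUID_SIZE
// bytes of version information taken from a serialized acceleration structure.
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
inline bool exampleIsSerializedAccelerationStructureCompatible( VULKAN_HPP_NAMESPACE::Device device, const uint8_t * versionData )
{
  VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR versionInfo;
  versionInfo.pVersionData = versionData;
  return device.getAccelerationStructureCompatibilityKHR( versionInfo ) == VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR::eCompatible;
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/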
  67963. template <typename Dispatch>
  67964. VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67965. {
  67966. d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast< VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
  67967. }
  67968. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67969. template <typename Dispatch>
  67970. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67971. {
  67972. VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
  67973. d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
  67974. return peerMemoryFeatures;
  67975. }
  67976. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67977. template <typename Dispatch>
  67978. VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags* pPeerMemoryFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67979. {
  67980. d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast< VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
  67981. }
  67982. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67983. template <typename Dispatch>
  67984. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67985. {
  67986. VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
  67987. d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
  67988. return peerMemoryFeatures;
  67989. }
  67990. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  67991. template <typename Dispatch>
  67992. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  67993. {
  67994. return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast< VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) );
  67995. }
  67996. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  67997. template <typename Dispatch>
  67998. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const
  67999. {
  68000. VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities;
  68001. Result result = static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) ) );
  68002. return createResultValue( result, deviceGroupPresentCapabilities, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" );
  68003. }
  68004. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68005. #ifdef VK_USE_PLATFORM_WIN32_KHR
  68006. template <typename Dispatch>
  68007. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68008. {
  68009. return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast< VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
  68010. }
  68011. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68012. template <typename Dispatch>
  68013. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  68014. {
  68015. VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
  68016. Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModes2EXT( m_device, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
  68017. return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModes2EXT" );
  68018. }
  68019. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68020. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  68021. template <typename Dispatch>
  68022. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR* pModes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68023. {
  68024. return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast< VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) );
  68025. }
  68026. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68027. template <typename Dispatch>
  68028. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  68029. {
  68030. VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes;
  68031. Result result = static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ) );
  68032. return createResultValue( result, modes, VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" );
  68033. }
  68034. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68035. template <typename Dispatch>
  68036. VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize* pCommittedMemoryInBytes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68037. {
  68038. d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast< VkDeviceSize *>( pCommittedMemoryInBytes ) );
  68039. }
  68040. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68041. template <typename Dispatch>
  68042. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68043. {
  68044. VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
  68045. d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );
  68046. return committedMemoryInBytes;
  68047. }
  68048. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68049. template <typename Dispatch>
  68050. VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68051. {
  68052. return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
  68053. }
  68054. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68055. template <typename Dispatch>
  68056. VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68057. {
  68058. return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
  68059. }
  68060. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68061. template <typename Dispatch>
  68062. VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68063. {
  68064. return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
  68065. }
  68066. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68067. template <typename Dispatch>
  68068. VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddressKHR( const DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68069. {
  68070. return d.vkGetDeviceMemoryOpaqueCaptureAddressKHR( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
  68071. }
  68072. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68073. template <typename Dispatch>
  68074. VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char* pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68075. {
  68076. return d.vkGetDeviceProcAddr( m_device, pName );
  68077. }
  68078. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68079. template <typename Dispatch>
  68080. VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68081. {
  68082. return d.vkGetDeviceProcAddr( m_device, name.c_str() );
  68083. }
  68084. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
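// --- Illustrative usage sketch; not part of the generated interface. The example
// function is hypothetical. Device-level entry points resolved through
// vkGetDeviceProcAddr bypass the loader trampoline; the result is nullptr if the name
// is not a device-level command enabled on this device.
inline PFN_vkVoidFunction exampleLoadDeviceFunction( VULKAN_HPP_NAMESPACE::Device device, const char * name )
{
  return device.getProcAddr( name );
}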
  68085. template <typename Dispatch>
  68086. VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68087. {
  68088. d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast< VkQueue *>( pQueue ) );
  68089. }
  68090. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68091. template <typename Dispatch>
  68092. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68093. {
  68094. VULKAN_HPP_NAMESPACE::Queue queue;
  68095. d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) );
  68096. return queue;
  68097. }
  68098. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
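// --- Illustrative usage sketch; not part of the generated interface. The example
// function is hypothetical. It retrieves queue 0 of the given family, which is assumed
// to have been requested with at least one queue in the DeviceCreateInfo.
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
inline VULKAN_HPP_NAMESPACE::Queue exampleGetFirstQueue( VULKAN_HPP_NAMESPACE::Device device, uint32_t queueFamilyIndex )
{
  return device.getQueue( queueFamilyIndex, 0 );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/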
  68099. template <typename Dispatch>
  68100. VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2* pQueueInfo, VULKAN_HPP_NAMESPACE::Queue* pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68101. {
  68102. d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast< VkQueue *>( pQueue ) );
  68103. }
  68104. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68105. template <typename Dispatch>
  68106. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const DeviceQueueInfo2 & queueInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68107. {
  68108. VULKAN_HPP_NAMESPACE::Queue queue;
  68109. d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );
  68110. return queue;
  68111. }
  68112. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68113. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68114. template <typename Dispatch>
  68115. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68116. {
  68117. return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
  68118. }
  68119. #else
  68120. template <typename Dispatch>
  68121. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
  68122. {
  68123. Result result = static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
  68124. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );
  68125. }
  68126. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
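// --- Illustrative usage sketch; not part of the generated interface. The example
// function is hypothetical. Since eEventSet and eEventReset are both success codes,
// the enhanced wrapper returns the Result instead of throwing on eEventReset, so the
// same comparison compiles in enhanced and non-enhanced mode.
inline bool exampleIsEventSet( VULKAN_HPP_NAMESPACE::Device device, VULKAN_HPP_NAMESPACE::Event event )
{
  return device.getEventStatus( event ) == VULKAN_HPP_NAMESPACE::Result::eEventSet;
}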
  68127. template <typename Dispatch>
  68128. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68129. {
  68130. return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  68131. }
  68132. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68133. template <typename Dispatch>
  68134. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const FenceGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
  68135. {
  68136. int fd;
  68137. Result result = static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd ) );
  68138. return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" );
  68139. }
  68140. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68141. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68142. template <typename Dispatch>
  68143. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68144. {
  68145. return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
  68146. }
  68147. #else
  68148. template <typename Dispatch>
  68149. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
  68150. {
  68151. Result result = static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
  68152. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
  68153. }
  68154. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68155. #ifdef VK_USE_PLATFORM_WIN32_KHR
  68156. template <typename Dispatch>
  68157. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68158. {
  68159. return static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
  68160. }
  68161. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68162. template <typename Dispatch>
  68163. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getFenceWin32HandleKHR( const FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
  68164. {
  68165. HANDLE handle;
  68166. Result result = static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
  68167. return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" );
  68168. }
  68169. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68170. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  68171. template <typename Dispatch>
  68172. VULKAN_HPP_INLINE void Device::getGeneratedCommandsMemoryRequirementsNV( const VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68173. {
  68174. d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( pInfo ), reinterpret_cast< VkMemoryRequirements2 *>( pMemoryRequirements ) );
  68175. }
  68176. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68177. template <typename Dispatch>
  68178. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68179. {
  68180. VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  68181. d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  68182. return memoryRequirements;
  68183. }
  68184. template <typename X, typename Y, typename... Z, typename Dispatch>
  68185. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getGeneratedCommandsMemoryRequirementsNV( const GeneratedCommandsMemoryRequirementsInfoNV & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68186. {
  68187. StructureChain<X, Y, Z...> structureChain;
  68188. VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  68189. d.vkGetGeneratedCommandsMemoryRequirementsNV( m_device, reinterpret_cast<const VkGeneratedCommandsMemoryRequirementsInfoNV *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  68190. return structureChain;
  68191. }
  68192. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68193. template <typename Dispatch>
  68194. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT* pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68195. {
  68196. return static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast<VkImage>( image ), reinterpret_cast< VkImageDrmFormatModifierPropertiesEXT *>( pProperties ) ) );
  68197. }
  68198. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68199. template <typename Dispatch>
  68200. VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::type Device::getImageDrmFormatModifierPropertiesEXT( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
  68201. {
  68202. VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT properties;
  68203. Result result = static_cast<Result>( d.vkGetImageDrmFormatModifierPropertiesEXT( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkImageDrmFormatModifierPropertiesEXT *>( &properties ) ) );
  68204. return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageDrmFormatModifierPropertiesEXT" );
  68205. }
  68206. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68207. template <typename Dispatch>
  68208. VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements* pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68209. {
  68210. d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast< VkMemoryRequirements *>( pMemoryRequirements ) );
  68211. }
  68212. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68213. template <typename Dispatch>
  68214. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68215. {
  68216. VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
  68217. d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
  68218. return memoryRequirements;
  68219. }
  68220. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68221. template <typename Dispatch>
  68222. VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68223. {
  68224. d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast< VkMemoryRequirements2 *>( pMemoryRequirements ) );
  68225. }
  68226. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68227. template <typename Dispatch>
  68228. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68229. {
  68230. VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  68231. d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  68232. return memoryRequirements;
  68233. }
  68234. template <typename X, typename Y, typename... Z, typename Dispatch>
  68235. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2( const ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68236. {
  68237. StructureChain<X, Y, Z...> structureChain;
  68238. VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  68239. d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  68240. return structureChain;
  68241. }
  68242. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68243. template <typename Dispatch>
  68244. VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2* pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2* pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68245. {
  68246. d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast< VkMemoryRequirements2 *>( pMemoryRequirements ) );
  68247. }
  68248. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68249. template <typename Dispatch>
  68250. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68251. {
  68252. VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
  68253. d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  68254. return memoryRequirements;
  68255. }
  68256. template <typename X, typename Y, typename... Z, typename Dispatch>
  68257. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2KHR( const ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68258. {
  68259. StructureChain<X, Y, Z...> structureChain;
  68260. VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
  68261. d.vkGetImageMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
  68262. return structureChain;
  68263. }
  68264. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68265. template <typename Dispatch>
  68266. VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements* pSparseMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68267. {
  68268. d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), pSparseMemoryRequirementCount, reinterpret_cast< VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) );
  68269. }
  68270. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68271. template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch>
  68272. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
  68273. {
  68274. std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements;
  68275. uint32_t sparseMemoryRequirementCount;
  68276. d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
  68277. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  68278. d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
  68279. VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
  68280. return sparseMemoryRequirements;
  68281. }
  68282. template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements>::value, int>::type >
  68283. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, Dispatch const & d ) const
  68284. {
  68285. std::vector<SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements( sparseImageMemoryRequirementsAllocator );
  68286. uint32_t sparseMemoryRequirementCount;
  68287. d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
  68288. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  68289. d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
  68290. VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
  68291. return sparseMemoryRequirements;
  68292. }
  68293. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68294. template <typename Dispatch>
  68295. VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68296. {
  68297. d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast< VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
  68298. }
  68299. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68300. template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
  68301. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const
  68302. {
  68303. std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
  68304. uint32_t sparseMemoryRequirementCount;
  68305. d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
  68306. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  68307. d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
  68308. VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
  68309. return sparseMemoryRequirements;
  68310. }
  68311. template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type >
  68312. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements2( const ImageSparseMemoryRequirementsInfo2 & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d ) const
  68313. {
  68314. std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( sparseImageMemoryRequirements2Allocator );
  68315. uint32_t sparseMemoryRequirementCount;
  68316. d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
  68317. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  68318. d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
  68319. VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
  68320. return sparseMemoryRequirements;
  68321. }
  68322. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
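  // Usage sketch (illustrative comment): the allocator-templated overloads above follow the common
  // vulkan.hpp pattern of querying the count first and then filling a std::vector, e.g. (assuming
  // `device`, `image` and the `vk` namespace alias)
  //   std::vector<vk::SparseImageMemoryRequirements2> reqs =
  //     device.getImageSparseMemoryRequirements2( vk::ImageSparseMemoryRequirementsInfo2( image ) );
  // The second overload differs only in taking a caller-provided allocator for the returned vector.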
  68323. template <typename Dispatch>
  68324. VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2KHR( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2* pSparseMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68325. {
  68326. d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast< VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
  68327. }
  68328. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68329. template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
  68330. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const
  68331. {
  68332. std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
  68333. uint32_t sparseMemoryRequirementCount;
  68334. d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
  68335. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  68336. d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
  68337. VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
  68338. return sparseMemoryRequirements;
  68339. }
  68340. template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SparseImageMemoryRequirements2>::value, int>::type >
  68341. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements2KHR( const ImageSparseMemoryRequirementsInfo2 & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d ) const
  68342. {
  68343. std::vector<SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( sparseImageMemoryRequirements2Allocator );
  68344. uint32_t sparseMemoryRequirementCount;
  68345. d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
  68346. sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
  68347. d.vkGetImageSparseMemoryRequirements2KHR( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
  68348. VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
  68349. return sparseMemoryRequirements;
  68350. }
  68351. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource* pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout* pLayout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource *>( pSubresource ), reinterpret_cast< VkSubresourceLayout *>( pLayout ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const ImageSubresource & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
    d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource *>( &subresource ), reinterpret_cast<VkSubresourceLayout *>( &layout ) );
    return layout;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68366. template <typename Dispatch>
  68367. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX* pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68368. {
  68369. return static_cast<Result>( d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast< VkImageViewAddressPropertiesNVX *>( pProperties ) ) );
  68370. }
  68371. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68372. template <typename Dispatch>
  68373. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const
  68374. {
  68375. VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties;
  68376. Result result = static_cast<Result>( d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) ) );
  68377. return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" );
  68378. }
  68379. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68380. template <typename Dispatch>
  68381. VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68382. {
  68383. return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) );
  68384. }
  68385. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68386. template <typename Dispatch>
  68387. VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const ImageViewHandleInfoNVX & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68388. {
  68389. return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) );
  68390. }
  68391. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68392. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  68393. template <typename Dispatch>
  68394. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryAndroidHardwareBufferANDROID( const VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68395. {
  68396. return static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( pInfo ), pBuffer ) );
  68397. }
  68398. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68399. template <typename Dispatch>
  68400. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<struct AHardwareBuffer*>::type Device::getMemoryAndroidHardwareBufferANDROID( const MemoryGetAndroidHardwareBufferInfoANDROID & info, Dispatch const & d ) const
  68401. {
  68402. struct AHardwareBuffer* buffer;
  68403. Result result = static_cast<Result>( d.vkGetMemoryAndroidHardwareBufferANDROID( m_device, reinterpret_cast<const VkMemoryGetAndroidHardwareBufferInfoANDROID *>( &info ), &buffer ) );
  68404. return createResultValue( result, buffer, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryAndroidHardwareBufferANDROID" );
  68405. }
  68406. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68407. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  68408. template <typename Dispatch>
  68409. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68410. {
  68411. return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  68412. }
  68413. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68414. template <typename Dispatch>
  68415. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const MemoryGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
  68416. {
  68417. int fd;
  68418. Result result = static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd ) );
  68419. return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" );
  68420. }
  68421. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
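  // Usage sketch (illustrative comment): with exceptions enabled, the enhanced-mode overload returns the
  // exported file descriptor directly; ownership of the fd passes to the caller, e.g. (assuming `device`,
  // a vk::DeviceMemory `memory` created with an opaque-fd export handle type, and the `vk` alias)
  //   int fd = device.getMemoryFdKHR( vk::MemoryGetFdInfoKHR( memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd ) );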
  68422. template <typename Dispatch>
  68423. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR* pMemoryFdProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68424. {
  68425. return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast< VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) );
  68426. }
  68427. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68428. template <typename Dispatch>
  68429. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d ) const
  68430. {
  68431. VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties;
  68432. Result result = static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) ) );
  68433. return createResultValue( result, memoryFdProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" );
  68434. }
  68435. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68436. template <typename Dispatch>
  68437. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68438. {
  68439. return static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast< VkMemoryHostPointerPropertiesEXT *>( pMemoryHostPointerProperties ) ) );
  68440. }
  68441. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68442. template <typename Dispatch>
  68443. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::type Device::getMemoryHostPointerPropertiesEXT( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, Dispatch const & d ) const
  68444. {
  68445. VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT memoryHostPointerProperties;
  68446. Result result = static_cast<Result>( d.vkGetMemoryHostPointerPropertiesEXT( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), pHostPointer, reinterpret_cast<VkMemoryHostPointerPropertiesEXT *>( &memoryHostPointerProperties ) ) );
  68447. return createResultValue( result, memoryHostPointerProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryHostPointerPropertiesEXT" );
  68448. }
  68449. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68450. #ifdef VK_USE_PLATFORM_WIN32_KHR
  68451. template <typename Dispatch>
  68452. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68453. {
  68454. return static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
  68455. }
  68456. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68457. template <typename Dispatch>
  68458. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleKHR( const MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
  68459. {
  68460. HANDLE handle;
  68461. Result result = static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
  68462. return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" );
  68463. }
  68464. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68465. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  68466. #ifdef VK_USE_PLATFORM_WIN32_KHR
  68467. template <typename Dispatch>
  68468. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68469. {
  68470. return static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) );
  68471. }
  68472. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68473. template <typename Dispatch>
  68474. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d ) const
  68475. {
  68476. HANDLE handle;
  68477. Result result = static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle ) );
  68478. return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" );
  68479. }
  68480. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68481. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  68482. #ifdef VK_USE_PLATFORM_WIN32_KHR
  68483. template <typename Dispatch>
  68484. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68485. {
  68486. return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast< VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) );
  68487. }
  68488. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68489. template <typename Dispatch>
  68490. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d ) const
  68491. {
  68492. VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties;
  68493. Result result = static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) ) );
  68494. return createResultValue( result, memoryWin32HandleProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" );
  68495. }
  68496. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68497. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  68498. template <typename Dispatch>
  68499. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE* pPresentationTimings, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68500. {
  68501. return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), pPresentationTimingCount, reinterpret_cast< VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) );
  68502. }
  68503. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68504. template <typename PastPresentationTimingGOOGLEAllocator, typename Dispatch>
  68505. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  68506. {
  68507. std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings;
  68508. uint32_t presentationTimingCount;
  68509. Result result;
  68510. do
  68511. {
  68512. result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
  68513. if ( ( result == Result::eSuccess ) && presentationTimingCount )
  68514. {
  68515. presentationTimings.resize( presentationTimingCount );
  68516. result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) );
  68517. VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
  68518. }
  68519. } while ( result == Result::eIncomplete );
  68520. if ( ( result == Result::eSuccess ) && ( presentationTimingCount < presentationTimings.size() ) )
  68521. {
  68522. presentationTimings.resize( presentationTimingCount );
  68523. }
  68524. return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" );
  68525. }
  68526. template <typename PastPresentationTimingGOOGLEAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PastPresentationTimingGOOGLE>::value, int>::type >
  68527. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, Dispatch const & d ) const
  68528. {
  68529. std::vector<PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings( pastPresentationTimingGOOGLEAllocator );
  68530. uint32_t presentationTimingCount;
  68531. Result result;
  68532. do
  68533. {
  68534. result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ) );
  68535. if ( ( result == Result::eSuccess ) && presentationTimingCount )
  68536. {
  68537. presentationTimings.resize( presentationTimingCount );
  68538. result = static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ) );
  68539. VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() );
  68540. }
  68541. } while ( result == Result::eIncomplete );
  68542. if ( ( result == Result::eSuccess ) && ( presentationTimingCount < presentationTimings.size() ) )
  68543. {
  68544. presentationTimings.resize( presentationTimingCount );
  68545. }
  68546. return createResultValue( result, presentationTimings, VULKAN_HPP_NAMESPACE_STRING"::Device::getPastPresentationTimingGOOGLE" );
  68547. }
  68548. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
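  // Note on the loops above: vkGetPastPresentationTimingGOOGLE is a two-call enumeration, so the enhanced-mode
  // overloads keep re-querying while the driver reports VK_INCOMPLETE and shrink the vector to the final count.
  // A minimal call (names assumed) therefore needs no count handling at all:
  //   std::vector<vk::PastPresentationTimingGOOGLE> timings = device.getPastPresentationTimingGOOGLE( swapchain );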
  68549. template <typename Dispatch>
  68550. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, VULKAN_HPP_NAMESPACE::PerformanceValueINTEL* pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68551. {
  68552. return static_cast<Result>( d.vkGetPerformanceParameterINTEL( m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast< VkPerformanceValueINTEL *>( pValue ) ) );
  68553. }
  68554. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68555. template <typename Dispatch>
  68556. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::type Device::getPerformanceParameterINTEL( VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL parameter, Dispatch const & d ) const
  68557. {
  68558. VULKAN_HPP_NAMESPACE::PerformanceValueINTEL value;
  68559. Result result = static_cast<Result>( d.vkGetPerformanceParameterINTEL( m_device, static_cast<VkPerformanceParameterTypeINTEL>( parameter ), reinterpret_cast<VkPerformanceValueINTEL *>( &value ) ) );
  68560. return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getPerformanceParameterINTEL" );
  68561. }
  68562. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68563. template <typename Dispatch>
  68564. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t* pDataSize, void* pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68565. {
  68566. return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
  68567. }
  68568. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68569. template <typename Uint8_tAllocator, typename Dispatch>
  68570. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const
  68571. {
  68572. std::vector<uint8_t, Uint8_tAllocator> data;
  68573. size_t dataSize;
  68574. Result result;
  68575. do
  68576. {
  68577. result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
  68578. if ( ( result == Result::eSuccess ) && dataSize )
  68579. {
  68580. data.resize( dataSize );
  68581. result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
  68582. VULKAN_HPP_ASSERT( dataSize <= data.size() );
  68583. }
  68584. } while ( result == Result::eIncomplete );
  68585. if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) )
  68586. {
  68587. data.resize( dataSize );
  68588. }
  68589. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" );
  68590. }
  68591. template <typename Uint8_tAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type >
  68592. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
  68593. {
  68594. std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
  68595. size_t dataSize;
  68596. Result result;
  68597. do
  68598. {
  68599. result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ) );
  68600. if ( ( result == Result::eSuccess ) && dataSize )
  68601. {
  68602. data.resize( dataSize );
  68603. result = static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
  68604. VULKAN_HPP_ASSERT( dataSize <= data.size() );
  68605. }
  68606. } while ( result == Result::eIncomplete );
  68607. if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) )
  68608. {
  68609. data.resize( dataSize );
  68610. }
  68611. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineCacheData" );
  68612. }
  68613. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
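  // Usage sketch (illustrative comment): the returned byte vector can be persisted and fed back through
  // vk::PipelineCacheCreateInfo::pInitialData on a later run. The file name and stream handling below are
  // assumptions, not part of vulkan.hpp:
  //   std::vector<uint8_t> blob = device.getPipelineCacheData( pipelineCache );
  //   std::ofstream( "pipeline_cache.bin", std::ios::binary )
  //     .write( reinterpret_cast<const char *>( blob.data() ), static_cast<std::streamsize>( blob.size() ) );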
  68614. template <typename Dispatch>
  68615. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutableInternalRepresentationsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR* pInternalRepresentations, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68616. {
  68617. return static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ), pInternalRepresentationCount, reinterpret_cast< VkPipelineExecutableInternalRepresentationKHR *>( pInternalRepresentations ) ) );
  68618. }
  68619. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68620. template <typename PipelineExecutableInternalRepresentationKHRAllocator, typename Dispatch>
  68621. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const
  68622. {
  68623. std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator> internalRepresentations;
  68624. uint32_t internalRepresentationCount;
  68625. Result result;
  68626. do
  68627. {
  68628. result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr ) );
  68629. if ( ( result == Result::eSuccess ) && internalRepresentationCount )
  68630. {
  68631. internalRepresentations.resize( internalRepresentationCount );
  68632. result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
  68633. VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
  68634. }
  68635. } while ( result == Result::eIncomplete );
  68636. if ( ( result == Result::eSuccess ) && ( internalRepresentationCount < internalRepresentations.size() ) )
  68637. {
  68638. internalRepresentations.resize( internalRepresentationCount );
  68639. }
  68640. return createResultValue( result, internalRepresentations, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableInternalRepresentationsKHR" );
  68641. }
  68642. template <typename PipelineExecutableInternalRepresentationKHRAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableInternalRepresentationKHR>::value, int>::type >
  68643. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator>>::type Device::getPipelineExecutableInternalRepresentationsKHR( const PipelineExecutableInfoKHR & executableInfo, PipelineExecutableInternalRepresentationKHRAllocator & pipelineExecutableInternalRepresentationKHRAllocator, Dispatch const & d ) const
  68644. {
  68645. std::vector<PipelineExecutableInternalRepresentationKHR, PipelineExecutableInternalRepresentationKHRAllocator> internalRepresentations( pipelineExecutableInternalRepresentationKHRAllocator );
  68646. uint32_t internalRepresentationCount;
  68647. Result result;
  68648. do
  68649. {
  68650. result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, nullptr ) );
  68651. if ( ( result == Result::eSuccess ) && internalRepresentationCount )
  68652. {
  68653. internalRepresentations.resize( internalRepresentationCount );
  68654. result = static_cast<Result>( d.vkGetPipelineExecutableInternalRepresentationsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &internalRepresentationCount, reinterpret_cast<VkPipelineExecutableInternalRepresentationKHR *>( internalRepresentations.data() ) ) );
  68655. VULKAN_HPP_ASSERT( internalRepresentationCount <= internalRepresentations.size() );
  68656. }
  68657. } while ( result == Result::eIncomplete );
  68658. if ( ( result == Result::eSuccess ) && ( internalRepresentationCount < internalRepresentations.size() ) )
  68659. {
  68660. internalRepresentations.resize( internalRepresentationCount );
  68661. }
  68662. return createResultValue( result, internalRepresentations, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableInternalRepresentationsKHR" );
  68663. }
  68664. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68665. template <typename Dispatch>
  68666. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutablePropertiesKHR( const VULKAN_HPP_NAMESPACE::PipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR* pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68667. {
  68668. return static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( pPipelineInfo ), pExecutableCount, reinterpret_cast< VkPipelineExecutablePropertiesKHR *>( pProperties ) ) );
  68669. }
  68670. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68671. template <typename PipelineExecutablePropertiesKHRAllocator, typename Dispatch>
  68672. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, Dispatch const & d ) const
  68673. {
  68674. std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties;
  68675. uint32_t executableCount;
  68676. Result result;
  68677. do
  68678. {
  68679. result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) );
  68680. if ( ( result == Result::eSuccess ) && executableCount )
  68681. {
  68682. properties.resize( executableCount );
  68683. result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
  68684. VULKAN_HPP_ASSERT( executableCount <= properties.size() );
  68685. }
  68686. } while ( result == Result::eIncomplete );
  68687. if ( ( result == Result::eSuccess ) && ( executableCount < properties.size() ) )
  68688. {
  68689. properties.resize( executableCount );
  68690. }
  68691. return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutablePropertiesKHR" );
  68692. }
  68693. template <typename PipelineExecutablePropertiesKHRAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutablePropertiesKHR>::value, int>::type >
  68694. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator>>::type Device::getPipelineExecutablePropertiesKHR( const PipelineInfoKHR & pipelineInfo, PipelineExecutablePropertiesKHRAllocator & pipelineExecutablePropertiesKHRAllocator, Dispatch const & d ) const
  68695. {
  68696. std::vector<PipelineExecutablePropertiesKHR, PipelineExecutablePropertiesKHRAllocator> properties( pipelineExecutablePropertiesKHRAllocator );
  68697. uint32_t executableCount;
  68698. Result result;
  68699. do
  68700. {
  68701. result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, nullptr ) );
  68702. if ( ( result == Result::eSuccess ) && executableCount )
  68703. {
  68704. properties.resize( executableCount );
  68705. result = static_cast<Result>( d.vkGetPipelineExecutablePropertiesKHR( m_device, reinterpret_cast<const VkPipelineInfoKHR *>( &pipelineInfo ), &executableCount, reinterpret_cast<VkPipelineExecutablePropertiesKHR *>( properties.data() ) ) );
  68706. VULKAN_HPP_ASSERT( executableCount <= properties.size() );
  68707. }
  68708. } while ( result == Result::eIncomplete );
  68709. if ( ( result == Result::eSuccess ) && ( executableCount < properties.size() ) )
  68710. {
  68711. properties.resize( executableCount );
  68712. }
  68713. return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutablePropertiesKHR" );
  68714. }
  68715. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68716. template <typename Dispatch>
  68717. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineExecutableStatisticsKHR( const VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR* pStatistics, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68718. {
  68719. return static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( pExecutableInfo ), pStatisticCount, reinterpret_cast< VkPipelineExecutableStatisticKHR *>( pStatistics ) ) );
  68720. }
  68721. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68722. template <typename PipelineExecutableStatisticKHRAllocator, typename Dispatch>
  68723. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, Dispatch const & d ) const
  68724. {
  68725. std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics;
  68726. uint32_t statisticCount;
  68727. Result result;
  68728. do
  68729. {
  68730. result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr ) );
  68731. if ( ( result == Result::eSuccess ) && statisticCount )
  68732. {
  68733. statistics.resize( statisticCount );
  68734. result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
  68735. VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
  68736. }
  68737. } while ( result == Result::eIncomplete );
  68738. if ( ( result == Result::eSuccess ) && ( statisticCount < statistics.size() ) )
  68739. {
  68740. statistics.resize( statisticCount );
  68741. }
  68742. return createResultValue( result, statistics, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableStatisticsKHR" );
  68743. }
  68744. template <typename PipelineExecutableStatisticKHRAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PipelineExecutableStatisticKHR>::value, int>::type >
  68745. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator>>::type Device::getPipelineExecutableStatisticsKHR( const PipelineExecutableInfoKHR & executableInfo, PipelineExecutableStatisticKHRAllocator & pipelineExecutableStatisticKHRAllocator, Dispatch const & d ) const
  68746. {
  68747. std::vector<PipelineExecutableStatisticKHR, PipelineExecutableStatisticKHRAllocator> statistics( pipelineExecutableStatisticKHRAllocator );
  68748. uint32_t statisticCount;
  68749. Result result;
  68750. do
  68751. {
  68752. result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, nullptr ) );
  68753. if ( ( result == Result::eSuccess ) && statisticCount )
  68754. {
  68755. statistics.resize( statisticCount );
  68756. result = static_cast<Result>( d.vkGetPipelineExecutableStatisticsKHR( m_device, reinterpret_cast<const VkPipelineExecutableInfoKHR *>( &executableInfo ), &statisticCount, reinterpret_cast<VkPipelineExecutableStatisticKHR *>( statistics.data() ) ) );
  68757. VULKAN_HPP_ASSERT( statisticCount <= statistics.size() );
  68758. }
  68759. } while ( result == Result::eIncomplete );
  68760. if ( ( result == Result::eSuccess ) && ( statisticCount < statistics.size() ) )
  68761. {
  68762. statistics.resize( statisticCount );
  68763. }
  68764. return createResultValue( result, statistics, VULKAN_HPP_NAMESPACE_STRING"::Device::getPipelineExecutableStatisticsKHR" );
  68765. }
  68766. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_INLINE void Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t* pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), pData );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    uint64_t data;
    d.vkGetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), &data );
    return data;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68781. template <typename Dispatch>
  68782. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68783. {
  68784. return static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData, static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
  68785. }
  68786. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68787. template <typename T, typename Dispatch>
  68788. VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
  68789. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, ArrayProxy<T> const &data, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const &d ) const
  68790. {
  68791. Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
  68792. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getQueryPoolResults", { Result::eSuccess, Result::eNotReady } );
  68793. }
  68794. template <typename T, typename Allocator, typename Dispatch>
  68795. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<T,Allocator>> Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const
  68796. {
  68797. VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
  68798. std::vector<T,Allocator> data( dataSize / sizeof( T ) );
  68799. Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( T ), reinterpret_cast<void *>( data.data() ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
  68800. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
  68801. }
  68802. template <typename T, typename Dispatch>
  68803. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<T> Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const
  68804. {
  68805. T data;
  68806. Result result = static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, sizeof( T ), reinterpret_cast<void *>( &data ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
  68807. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
  68808. }
  68809. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
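  // Usage sketch (illustrative comment): because VK_NOT_READY is a success code here, the typed overloads
  // return a ResultValue whose .result must still be inspected, e.g. (assuming `device`, `queryPool`,
  // `queryCount` and the `vk` alias)
  //   auto rv = device.getQueryPoolResults<uint64_t>( queryPool, 0, queryCount,
  //                                                   queryCount * sizeof( uint64_t ), sizeof( uint64_t ),
  //                                                   vk::QueryResultFlagBits::e64 );
  //   if ( rv.result == vk::Result::eSuccess ) { /* rv.value holds queryCount 64-bit results */ }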
  68810. template <typename Dispatch>
  68811. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68812. {
  68813. return static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
  68814. }
  68815. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68816. template <typename T, typename Dispatch>
  68817. VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
  68818. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> const &data, Dispatch const &d ) const
  68819. {
  68820. Result result = static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
  68821. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
  68822. }
  68823. template <typename T, typename Allocator, typename Dispatch>
  68824. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T,Allocator>>::type Device::getRayTracingCaptureReplayShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
  68825. {
  68826. VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
  68827. std::vector<T,Allocator> data( dataSize / sizeof( T ) );
  68828. Result result = static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ), reinterpret_cast<void *>( data.data() ) ) );
  68829. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandlesKHR" );
  68830. }
  68831. template <typename T, typename Dispatch>
  68832. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type Device::getRayTracingCaptureReplayShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
  68833. {
  68834. T data;
  68835. Result result = static_cast<Result>( d.vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( T ), reinterpret_cast<void *>( &data ) ) );
  68836. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingCaptureReplayShaderGroupHandleKHR" );
  68837. }
  68838. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68839. template <typename Dispatch>
  68840. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68841. {
  68842. return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
  68843. }
  68844. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68845. template <typename T, typename Dispatch>
  68846. VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
  68847. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> const &data, Dispatch const &d ) const
  68848. {
  68849. Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
  68850. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesKHR" );
  68851. }
  68852. template <typename T, typename Allocator, typename Dispatch>
  68853. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T,Allocator>>::type Device::getRayTracingShaderGroupHandlesKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
  68854. {
  68855. VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
  68856. std::vector<T,Allocator> data( dataSize / sizeof( T ) );
  68857. Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ), reinterpret_cast<void *>( data.data() ) ) );
  68858. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesKHR" );
  68859. }
  68860. template <typename T, typename Dispatch>
  68861. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type Device::getRayTracingShaderGroupHandleKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
  68862. {
  68863. T data;
  68864. Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesKHR( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( T ), reinterpret_cast<void *>( &data ) ) );
  68865. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleKHR" );
  68866. }
  68867. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
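  // Usage sketch (illustrative comment): the typed overload above is the usual way to pull shader group
  // handles when assembling a shader binding table. `groupCount` and `handleSize`
  // (= PhysicalDeviceRayTracingPipelinePropertiesKHR::shaderGroupHandleSize) are assumed to be queried elsewhere:
  //   std::vector<uint8_t> handles =
  //     device.getRayTracingShaderGroupHandlesKHR<uint8_t>( pipeline, 0, groupCount, groupCount * handleSize );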
  68868. template <typename Dispatch>
  68869. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68870. {
  68871. return static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, dataSize, pData ) );
  68872. }
  68873. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68874. template <typename T, typename Dispatch>
  68875. VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
  68876. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, ArrayProxy<T> const &data, Dispatch const &d ) const
  68877. {
  68878. Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ) , reinterpret_cast<void*>( data.data() ) ) );
  68879. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING"::Device::getRayTracingShaderGroupHandlesNV" );
  68880. }
  68881. template <typename T, typename Allocator, typename Dispatch>
  68882. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T,Allocator>>::type Device::getRayTracingShaderGroupHandlesNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, Dispatch const & d ) const
  68883. {
  68884. VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
  68885. std::vector<T,Allocator> data( dataSize / sizeof( T ) );
  68886. Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, data.size() * sizeof( T ), reinterpret_cast<void *>( data.data() ) ) );
  68887. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandlesNV" );
  68888. }
  68889. template <typename T, typename Dispatch>
  68890. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type Device::getRayTracingShaderGroupHandleNV( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t firstGroup, uint32_t groupCount, Dispatch const & d ) const
  68891. {
  68892. T data;
  68893. Result result = static_cast<Result>( d.vkGetRayTracingShaderGroupHandlesNV( m_device, static_cast<VkPipeline>( pipeline ), firstGroup, groupCount, sizeof( T ), reinterpret_cast<void *>( &data ) ) );
  68894. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getRayTracingShaderGroupHandleNV" );
  68895. }
  68896. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68897. template <typename Dispatch>
  68898. VULKAN_HPP_INLINE DeviceSize Device::getRayTracingShaderGroupStackSizeKHR( VULKAN_HPP_NAMESPACE::Pipeline pipeline, uint32_t group, VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR groupShader, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68899. {
  68900. return static_cast<DeviceSize>( d.vkGetRayTracingShaderGroupStackSizeKHR( m_device, static_cast<VkPipeline>( pipeline ), group, static_cast<VkShaderGroupShaderKHR>( groupShader ) ) );
  68901. }
  68902. template <typename Dispatch>
  68903. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE* pDisplayTimingProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68904. {
  68905. return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast< VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) );
  68906. }
  68907. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68908. template <typename Dispatch>
  68909. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  68910. {
  68911. VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties;
  68912. Result result = static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) ) );
  68913. return createResultValue( result, displayTimingProperties, VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" );
  68914. }
  68915. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
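// getRefreshCycleDurationGOOGLE (VK_GOOGLE_display_timing) reports the display refresh period for a
// swapchain. A minimal sketch, assuming a valid vk::Device `device` and vk::SwapchainKHR `swapchain`:
//   vk::RefreshCycleDurationGOOGLE cycle = device.getRefreshCycleDurationGOOGLE( swapchain );
//   // cycle.refreshDuration holds the refresh period in nanoseconds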
  68916. template <typename Dispatch>
  68917. VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D* pGranularity, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68918. {
  68919. d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast< VkExtent2D *>( pGranularity ) );
  68920. }
  68921. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68922. template <typename Dispatch>
  68923. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68924. {
  68925. VULKAN_HPP_NAMESPACE::Extent2D granularity;
  68926. d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
  68927. return granularity;
  68928. }
  68929. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68930. template <typename Dispatch>
  68931. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68932. {
  68933. return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
  68934. }
  68935. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68936. template <typename Dispatch>
  68937. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
  68938. {
  68939. uint64_t value;
  68940. Result result = static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
  68941. return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
  68942. }
  68943. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68944. template <typename Dispatch>
  68945. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t* pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68946. {
  68947. return static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
  68948. }
  68949. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68950. template <typename Dispatch>
  68951. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValueKHR( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
  68952. {
  68953. uint64_t value;
  68954. Result result = static_cast<Result>( d.vkGetSemaphoreCounterValueKHR( m_device, static_cast<VkSemaphore>( semaphore ), &value ) );
  68955. return createResultValue( result, value, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValueKHR" );
  68956. }
  68957. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
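// getSemaphoreCounterValue / getSemaphoreCounterValueKHR read the current payload of a timeline
// semaphore (core in Vulkan 1.2, or via VK_KHR_timeline_semaphore). A minimal sketch, assuming a valid
// vk::Device `device` and a timeline vk::Semaphore `timeline`:
//   uint64_t value = device.getSemaphoreCounterValue( timeline );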
  68958. template <typename Dispatch>
  68959. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68960. {
  68961. return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) );
  68962. }
  68963. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68964. template <typename Dispatch>
  68965. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getSemaphoreFdKHR( const SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const
  68966. {
  68967. int fd;
  68968. Result result = static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd ) );
  68969. return createResultValue( result, fd, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" );
  68970. }
  68971. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
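// getSemaphoreFdKHR (VK_KHR_external_semaphore_fd) exports a POSIX file descriptor for a semaphore;
// ownership of the returned fd passes to the caller. A minimal sketch, assuming a valid vk::Device
// `device` and a semaphore created with an exportable handle type:
//   int fd = device.getSemaphoreFdKHR( { semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd } );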
  68972. #ifdef VK_USE_PLATFORM_WIN32_KHR
  68973. template <typename Dispatch>
  68974. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68975. {
  68976. return static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) );
  68977. }
  68978. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68979. template <typename Dispatch>
  68980. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getSemaphoreWin32HandleKHR( const SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const
  68981. {
  68982. HANDLE handle;
  68983. Result result = static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ) );
  68984. return createResultValue( result, handle, VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" );
  68985. }
  68986. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  68987. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  68988. template <typename Dispatch>
  68989. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  68990. {
  68991. return static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), pInfoSize, pInfo ) );
  68992. }
  68993. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  68994. template <typename Uint8_tAllocator, typename Dispatch>
  68995. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const & d ) const
  68996. {
  68997. std::vector<uint8_t, Uint8_tAllocator> info;
  68998. size_t infoSize;
  68999. Result result;
  69000. do
  69001. {
  69002. result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, nullptr ) );
  69003. if ( ( result == Result::eSuccess ) && infoSize )
  69004. {
  69005. info.resize( infoSize );
  69006. result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, reinterpret_cast<void *>( info.data() ) ) );
  69007. VULKAN_HPP_ASSERT( infoSize <= info.size() );
  69008. }
  69009. } while ( result == Result::eIncomplete );
  69010. if ( ( result == Result::eSuccess ) && ( infoSize < info.size() ) )
  69011. {
  69012. info.resize( infoSize );
  69013. }
69014. return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
  69015. }
  69016. template <typename Uint8_tAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type >
  69017. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
  69018. {
  69019. std::vector<uint8_t, Uint8_tAllocator> info( uint8_tAllocator );
  69020. size_t infoSize;
  69021. Result result;
  69022. do
  69023. {
  69024. result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, nullptr ) );
  69025. if ( ( result == Result::eSuccess ) && infoSize )
  69026. {
  69027. info.resize( infoSize );
  69028. result = static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, reinterpret_cast<void *>( info.data() ) ) );
  69029. VULKAN_HPP_ASSERT( infoSize <= info.size() );
  69030. }
  69031. } while ( result == Result::eIncomplete );
  69032. if ( ( result == Result::eSuccess ) && ( infoSize < info.size() ) )
  69033. {
  69034. info.resize( infoSize );
  69035. }
69036. return createResultValue( result, info, VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" );
  69037. }
  69038. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
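// The enhanced getShaderInfoAMD overloads above (VK_AMD_shader_info) follow the usual two-call
// enumeration pattern: query the required size with a null data pointer, resize, fetch, and repeat while
// the implementation reports eIncomplete. A minimal sketch, assuming a valid vk::Device `device` and a
// vk::Pipeline `pipeline` whose vertex stage should be disassembled:
//   std::vector<uint8_t> disassembly =
//     device.getShaderInfoAMD( pipeline, vk::ShaderStageFlagBits::eVertex, vk::ShaderInfoTypeAMD::eDisassembly );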
  69039. template <typename Dispatch>
  69040. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69041. {
  69042. return static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) );
  69043. }
  69044. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69045. template <typename Dispatch>
  69046. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d ) const
  69047. {
  69048. uint64_t counterValue;
  69049. Result result = static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue ) );
  69050. return createResultValue( result, counterValue, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" );
  69051. }
  69052. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69053. template <typename Dispatch>
  69054. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image* pSwapchainImages, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69055. {
  69056. return static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast< VkImage *>( pSwapchainImages ) ) );
  69057. }
  69058. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69059. template <typename ImageAllocator, typename Dispatch>
  69060. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image, ImageAllocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  69061. {
  69062. std::vector<Image, ImageAllocator> swapchainImages;
  69063. uint32_t swapchainImageCount;
  69064. Result result;
  69065. do
  69066. {
  69067. result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
  69068. if ( ( result == Result::eSuccess ) && swapchainImageCount )
  69069. {
  69070. swapchainImages.resize( swapchainImageCount );
  69071. result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) ) );
  69072. VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
  69073. }
  69074. } while ( result == Result::eIncomplete );
  69075. if ( ( result == Result::eSuccess ) && ( swapchainImageCount < swapchainImages.size() ) )
  69076. {
  69077. swapchainImages.resize( swapchainImageCount );
  69078. }
69079. return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
  69080. }
  69081. template <typename ImageAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, Image>::value, int>::type >
  69082. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Image, ImageAllocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d ) const
  69083. {
  69084. std::vector<Image, ImageAllocator> swapchainImages( imageAllocator );
  69085. uint32_t swapchainImageCount;
  69086. Result result;
  69087. do
  69088. {
  69089. result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ) );
  69090. if ( ( result == Result::eSuccess ) && swapchainImageCount )
  69091. {
  69092. swapchainImages.resize( swapchainImageCount );
  69093. result = static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) ) );
  69094. VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() );
  69095. }
  69096. } while ( result == Result::eIncomplete );
  69097. if ( ( result == Result::eSuccess ) && ( swapchainImageCount < swapchainImages.size() ) )
  69098. {
  69099. swapchainImages.resize( swapchainImageCount );
  69100. }
69101. return createResultValue( result, swapchainImages, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" );
  69102. }
  69103. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
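// getSwapchainImagesKHR retrieves the images owned by a swapchain via the same size-query / eIncomplete
// loop. A minimal sketch, assuming a valid vk::Device `device` and vk::SwapchainKHR `swapchain`
// (default dispatcher, exceptions enabled):
//   std::vector<vk::Image> images = device.getSwapchainImagesKHR( swapchain );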
  69104. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69105. template <typename Dispatch>
  69106. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69107. {
  69108. return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  69109. }
  69110. #else
  69111. template <typename Dispatch>
  69112. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  69113. {
  69114. Result result = static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  69115. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  69116. }
  69117. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
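// Note for getSwapchainStatusKHR (VK_KHR_shared_presentable_image): in enhanced mode the call returns
// the vk::Result itself, and both eSuccess and eSuboptimalKHR are accepted as success codes, so
// eSuboptimalKHR is handed back to the caller instead of being raised as an exception.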
  69118. template <typename Dispatch>
  69119. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, size_t* pDataSize, void* pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69120. {
  69121. return static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), pDataSize, pData ) );
  69122. }
  69123. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69124. template <typename Uint8_tAllocator, typename Dispatch>
  69125. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Dispatch const & d ) const
  69126. {
  69127. std::vector<uint8_t, Uint8_tAllocator> data;
  69128. size_t dataSize;
  69129. Result result;
  69130. do
  69131. {
  69132. result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) );
  69133. if ( ( result == Result::eSuccess ) && dataSize )
  69134. {
  69135. data.resize( dataSize );
  69136. result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
  69137. VULKAN_HPP_ASSERT( dataSize <= data.size() );
  69138. }
  69139. } while ( result == Result::eIncomplete );
  69140. if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) )
  69141. {
  69142. data.resize( dataSize );
  69143. }
69144. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
  69145. }
  69146. template <typename Uint8_tAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, uint8_t>::value, int>::type >
  69147. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type Device::getValidationCacheDataEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT validationCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
  69148. {
  69149. std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
  69150. size_t dataSize;
  69151. Result result;
  69152. do
  69153. {
  69154. result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, nullptr ) );
  69155. if ( ( result == Result::eSuccess ) && dataSize )
  69156. {
  69157. data.resize( dataSize );
  69158. result = static_cast<Result>( d.vkGetValidationCacheDataEXT( m_device, static_cast<VkValidationCacheEXT>( validationCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ) );
  69159. VULKAN_HPP_ASSERT( dataSize <= data.size() );
  69160. }
  69161. } while ( result == Result::eIncomplete );
  69162. if ( ( result == Result::eSuccess ) && ( dataSize < data.size() ) )
  69163. {
  69164. data.resize( dataSize );
  69165. }
69166. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::getValidationCacheDataEXT" );
  69167. }
  69168. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69169. template <typename Dispatch>
  69170. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR* pImportFenceFdInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69171. {
  69172. return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) );
  69173. }
  69174. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69175. template <typename Dispatch>
  69176. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceFdKHR( const ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const
  69177. {
  69178. Result result = static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) ) );
  69179. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" );
  69180. }
  69181. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69182. #ifdef VK_USE_PLATFORM_WIN32_KHR
  69183. template <typename Dispatch>
  69184. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69185. {
  69186. return static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) );
  69187. }
  69188. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69189. template <typename Dispatch>
  69190. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceWin32HandleKHR( const ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const
  69191. {
  69192. Result result = static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) ) );
  69193. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" );
  69194. }
  69195. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69196. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  69197. template <typename Dispatch>
  69198. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69199. {
  69200. return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) );
  69201. }
  69202. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69203. template <typename Dispatch>
  69204. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importSemaphoreFdKHR( const ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const
  69205. {
  69206. Result result = static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) ) );
  69207. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" );
  69208. }
  69209. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69210. #ifdef VK_USE_PLATFORM_WIN32_KHR
  69211. template <typename Dispatch>
  69212. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69213. {
  69214. return static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) );
  69215. }
  69216. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69217. template <typename Dispatch>
  69218. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importSemaphoreWin32HandleKHR( const ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const & d ) const
  69219. {
  69220. Result result = static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) ) );
  69221. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" );
  69222. }
  69223. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69224. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  69225. template <typename Dispatch>
  69226. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::initializePerformanceApiINTEL( const VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL* pInitializeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69227. {
  69228. return static_cast<Result>( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( pInitializeInfo ) ) );
  69229. }
  69230. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69231. template <typename Dispatch>
  69232. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::initializePerformanceApiINTEL( const InitializePerformanceApiInfoINTEL & initializeInfo, Dispatch const & d ) const
  69233. {
  69234. Result result = static_cast<Result>( d.vkInitializePerformanceApiINTEL( m_device, reinterpret_cast<const VkInitializePerformanceApiInfoINTEL *>( &initializeInfo ) ) );
  69235. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::initializePerformanceApiINTEL" );
  69236. }
  69237. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69238. template <typename Dispatch>
  69239. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange* pMemoryRanges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69240. {
  69241. return static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
  69242. }
  69243. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69244. template <typename Dispatch>
  69245. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::invalidateMappedMemoryRanges( ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const
  69246. {
  69247. Result result = static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ) );
  69248. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
  69249. }
  69250. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69251. template <typename Dispatch>
  69252. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69253. {
  69254. return static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), ppData ) );
  69255. }
  69256. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69257. template <typename Dispatch>
  69258. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void*>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, Dispatch const & d ) const
  69259. {
  69260. void* pData;
  69261. Result result = static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), &pData ) );
  69262. return createResultValue( result, pData, VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" );
  69263. }
  69264. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
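// mapMemory returns the mapped host pointer directly in enhanced mode. A minimal sketch, assuming a
// valid vk::Device `device`, a host-visible vk::DeviceMemory `memory`, and `bytes` to upload from `src`
// (a flush is additionally required if the memory is not host-coherent):
//   void * mapped = device.mapMemory( memory, 0, VK_WHOLE_SIZE );
//   std::memcpy( mapped, src, bytes );
//   device.unmapMemory( memory );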
  69265. template <typename Dispatch>
  69266. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache* pSrcCaches, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69267. {
  69268. return static_cast<Result>( d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
  69269. }
  69270. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69271. template <typename Dispatch>
  69272. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches, Dispatch const & d ) const
  69273. {
  69274. Result result = static_cast<Result>( d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size(), reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) ) );
  69275. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" );
  69276. }
  69277. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69278. template <typename Dispatch>
  69279. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::ValidationCacheEXT* pSrcCaches, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69280. {
  69281. return static_cast<Result>( d.vkMergeValidationCachesEXT( m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCacheCount, reinterpret_cast<const VkValidationCacheEXT *>( pSrcCaches ) ) );
  69282. }
  69283. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69284. template <typename Dispatch>
  69285. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::mergeValidationCachesEXT( VULKAN_HPP_NAMESPACE::ValidationCacheEXT dstCache, ArrayProxy<const VULKAN_HPP_NAMESPACE::ValidationCacheEXT> const & srcCaches, Dispatch const & d ) const
  69286. {
  69287. Result result = static_cast<Result>( d.vkMergeValidationCachesEXT( m_device, static_cast<VkValidationCacheEXT>( dstCache ), srcCaches.size(), reinterpret_cast<const VkValidationCacheEXT *>( srcCaches.data() ) ) );
  69288. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::mergeValidationCachesEXT" );
  69289. }
  69290. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69291. template <typename Dispatch>
  69292. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT* pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69293. {
  69294. return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkFence *>( pFence ) ) );
  69295. }
  69296. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69297. template <typename Dispatch>
  69298. VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::registerEventEXT( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69299. {
  69300. VULKAN_HPP_NAMESPACE::Fence fence;
  69301. Result result = static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) ) );
  69302. return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" );
  69303. }
  69304. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  69305. template <typename Dispatch>
  69306. VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::registerEventEXTUnique( const DeviceEventInfoEXT & deviceEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69307. {
  69308. VULKAN_HPP_NAMESPACE::Fence fence;
  69309. Result result = static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) ) );
  69310. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  69311. return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique", deleter );
  69312. }
  69313. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  69314. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
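// registerEventEXT / registerEventEXTUnique (VK_EXT_display_control) create a fence that becomes
// signaled when the requested device event occurs; the Unique flavour wraps it for automatic
// destruction. A minimal sketch, assuming a valid vk::Device `device` and smart handles enabled:
//   vk::UniqueFence hotplugFence =
//     device.registerEventEXTUnique( vk::DeviceEventInfoEXT( vk::DeviceEventTypeEXT::eDisplayHotplug ) );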
  69315. template <typename Dispatch>
  69316. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT* pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Fence* pFence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69317. {
  69318. return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkFence *>( pFence ) ) );
  69319. }
  69320. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69321. template <typename Dispatch>
  69322. VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69323. {
  69324. VULKAN_HPP_NAMESPACE::Fence fence;
  69325. Result result = static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) ) );
  69326. return createResultValue( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" );
  69327. }
  69328. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  69329. template <typename Dispatch>
  69330. VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayEventInfoEXT & displayEventInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69331. {
  69332. VULKAN_HPP_NAMESPACE::Fence fence;
  69333. Result result = static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) ) );
  69334. ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
  69335. return createResultValue<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( result, fence, VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique", deleter );
  69336. }
  69337. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  69338. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69339. #ifdef VK_USE_PLATFORM_WIN32_KHR
  69340. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69341. template <typename Dispatch>
  69342. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69343. {
  69344. return static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  69345. }
  69346. #else
  69347. template <typename Dispatch>
  69348. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::releaseFullScreenExclusiveModeEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const
  69349. {
  69350. Result result = static_cast<Result>( d.vkReleaseFullScreenExclusiveModeEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) );
  69351. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releaseFullScreenExclusiveModeEXT" );
  69352. }
  69353. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69354. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  69355. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69356. template <typename Dispatch>
  69357. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69358. {
  69359. return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  69360. }
  69361. #else
  69362. template <typename Dispatch>
  69363. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::releasePerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
  69364. {
  69365. Result result = static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  69366. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::releasePerformanceConfigurationINTEL" );
  69367. }
  69368. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69369. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69370. template <typename Dispatch>
  69371. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69372. {
  69373. return static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  69374. }
  69375. #else
  69376. template <typename Dispatch>
  69377. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::release( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
  69378. {
  69379. Result result = static_cast<Result>( d.vkReleasePerformanceConfigurationINTEL( m_device, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  69380. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::release" );
  69381. }
  69382. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
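// Device::release( PerformanceConfigurationINTEL ) above forwards to the same
// vkReleasePerformanceConfigurationINTEL entry point as releasePerformanceConfigurationINTEL, so the
// two overloads can be used interchangeably.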
  69383. template <typename Dispatch>
  69384. VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69385. {
  69386. d.vkReleaseProfilingLockKHR( m_device );
  69387. }
  69388. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69389. template <typename Dispatch>
  69390. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69391. {
  69392. return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
  69393. }
  69394. #else
  69395. template <typename Dispatch>
  69396. VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const
  69397. {
  69398. Result result = static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
  69399. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" );
  69400. }
  69401. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69402. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69403. template <typename Dispatch>
  69404. VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69405. {
  69406. return static_cast<Result>( d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
  69407. }
  69408. #else
  69409. template <typename Dispatch>
  69410. VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const & d ) const
  69411. {
  69412. Result result = static_cast<Result>( d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
  69413. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetDescriptorPool" );
  69414. }
  69415. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69416. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69417. template <typename Dispatch>
  69418. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69419. {
  69420. return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
  69421. }
  69422. #else
  69423. template <typename Dispatch>
  69424. VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
  69425. {
  69426. Result result = static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
  69427. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" );
  69428. }
  69429. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69430. template <typename Dispatch>
  69431. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69432. {
  69433. return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
  69434. }
  69435. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69436. template <typename Dispatch>
  69437. VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d ) const
  69438. {
  69439. Result result = static_cast<Result>( d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) ) );
  69440. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
  69441. }
  69442. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69443. template <typename Dispatch>
  69444. VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69445. {
  69446. d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  69447. }
  69448. template <typename Dispatch>
  69449. VULKAN_HPP_INLINE void Device::resetQueryPoolEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69450. {
  69451. d.vkResetQueryPoolEXT( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
  69452. }
  69453. template <typename Dispatch>
  69454. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT* pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69455. {
  69456. return static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( pNameInfo ) ) );
  69457. }
  69458. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69459. template <typename Dispatch>
  69460. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setDebugUtilsObjectNameEXT( const DebugUtilsObjectNameInfoEXT & nameInfo, Dispatch const & d ) const
  69461. {
  69462. Result result = static_cast<Result>( d.vkSetDebugUtilsObjectNameEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectNameInfoEXT *>( &nameInfo ) ) );
  69463. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectNameEXT" );
  69464. }
  69465. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69466. template <typename Dispatch>
  69467. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setDebugUtilsObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT* pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69468. {
  69469. return static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( pTagInfo ) ) );
  69470. }
  69471. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69472. template <typename Dispatch>
  69473. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setDebugUtilsObjectTagEXT( const DebugUtilsObjectTagInfoEXT & tagInfo, Dispatch const & d ) const
  69474. {
  69475. Result result = static_cast<Result>( d.vkSetDebugUtilsObjectTagEXT( m_device, reinterpret_cast<const VkDebugUtilsObjectTagInfoEXT *>( &tagInfo ) ) );
  69476. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setDebugUtilsObjectTagEXT" );
  69477. }
  69478. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69479. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69480. template <typename Dispatch>
  69481. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69482. {
  69483. return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
  69484. }
  69485. #else
  69486. template <typename Dispatch>
  69487. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
  69488. {
  69489. Result result = static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
  69490. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" );
  69491. }
  69492. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69493. template <typename Dispatch>
  69494. VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR* pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT* pMetadata, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69495. {
  69496. d.vkSetHdrMetadataEXT( m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) );
  69497. }
  69498. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69499. template <typename Dispatch>
  69500. VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains, ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
  69501. {
  69502. #ifdef VULKAN_HPP_NO_EXCEPTIONS
  69503. VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() );
  69504. #else
  69505. if ( swapchains.size() != metadata.size() )
  69506. {
  69507. throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" );
  69508. }
  69509. #endif /*VULKAN_HPP_NO_EXCEPTIONS*/
  69510. d.vkSetHdrMetadataEXT( m_device, swapchains.size(), reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ), reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) );
  69511. }
  69512. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
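// Note for the ArrayProxy overload of setHdrMetadataEXT (VK_EXT_hdr_metadata): the swapchains and
// metadata ranges must have the same length; a mismatch throws vk::LogicError (or triggers the assert
// when VULKAN_HPP_NO_EXCEPTIONS is defined).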
  69513. template <typename Dispatch>
  69514. VULKAN_HPP_INLINE void Device::setLocalDimmingAMD( VULKAN_HPP_NAMESPACE::SwapchainKHR swapChain, VULKAN_HPP_NAMESPACE::Bool32 localDimmingEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69515. {
  69516. d.vkSetLocalDimmingAMD( m_device, static_cast<VkSwapchainKHR>( swapChain ), static_cast<VkBool32>( localDimmingEnable ) );
  69517. }
  69518. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69519. template <typename Dispatch>
  69520. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69521. {
  69522. return static_cast<Result>( d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), data ) );
  69523. }
  69524. #else
  69525. template <typename Dispatch>
  69526. VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateDataEXT( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT privateDataSlot, uint64_t data, Dispatch const & d ) const
  69527. {
  69528. Result result = static_cast<Result>( d.vkSetPrivateDataEXT( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlotEXT>( privateDataSlot ), data ) );
  69529. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateDataEXT" );
  69530. }
  69531. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69532. template <typename Dispatch>
  69533. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69534. {
  69535. return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
  69536. }
  69537. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69538. template <typename Dispatch>
  69539. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::signalSemaphore( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
  69540. {
  69541. Result result = static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
  69542. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
  69543. }
  69544. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69545. template <typename Dispatch>
  69546. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphoreKHR( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo* pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69547. {
  69548. return static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
  69549. }
  69550. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69551. template <typename Dispatch>
  69552. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::signalSemaphoreKHR( const SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
  69553. {
  69554. Result result = static_cast<Result>( d.vkSignalSemaphoreKHR( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ) );
  69555. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphoreKHR" );
  69556. }
  69557. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69558. template <typename Dispatch>
  69559. VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69560. {
  69561. d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
  69562. }
  69563. template <typename Dispatch>
  69564. VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69565. {
  69566. d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
  69567. }
  69568. template <typename Dispatch>
  69569. VULKAN_HPP_INLINE void Device::uninitializePerformanceApiINTEL( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69570. {
  69571. d.vkUninitializePerformanceApiINTEL( m_device );
  69572. }
  69573. template <typename Dispatch>
  69574. VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69575. {
  69576. d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
  69577. }
  69578. template <typename Dispatch>
  69579. VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69580. {
  69581. d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
  69582. }
  69583. template <typename Dispatch>
  69584. VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69585. {
  69586. d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
  69587. }
  69588. template <typename Dispatch>
  69589. VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet* pDescriptorCopies, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69590. {
  69591. d.vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) );
  69592. }
  69593. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69594. template <typename Dispatch>
  69595. VULKAN_HPP_INLINE void Device::updateDescriptorSets( ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69596. {
  69597. d.vkUpdateDescriptorSets( m_device, descriptorWrites.size(), reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ), descriptorCopies.size(), reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
  69598. }
  69599. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
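// The ArrayProxy overload of updateDescriptorSets avoids spelling out the two count/pointer pairs. A
// minimal sketch, assuming a valid vk::Device `device`, a vk::DescriptorSet `set`, and a uniform
// vk::Buffer `buffer` bound at binding 0 (no copies, hence nullptr for the second range):
//   vk::DescriptorBufferInfo bufferInfo( buffer, 0, VK_WHOLE_SIZE );
//   vk::WriteDescriptorSet write( set, 0, 0, 1, vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo );
//   device.updateDescriptorSets( write, nullptr );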
  69600. template <typename Dispatch>
  69601. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence* pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69602. {
  69603. return static_cast<Result>( d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
  69604. }
  69605. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69606. template <typename Dispatch>
  69607. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d ) const
  69608. {
  69609. Result result = static_cast<Result>( d.vkWaitForFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout ) );
  69610. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
  69611. }
  69612. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
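// In enhanced mode waitForFences returns the vk::Result itself: eSuccess and eTimeout are both accepted
// result codes, so a timeout is reported to the caller rather than treated as an error. A minimal
// sketch, assuming a valid vk::Device `device` and vk::Fence `fence`:
//   vk::Result r = device.waitForFences( fence, VK_TRUE, UINT64_MAX );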
  69613. template <typename Dispatch>
  69614. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69615. {
  69616. return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
  69617. }
  69618. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69619. template <typename Dispatch>
  69620. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
  69621. {
  69622. Result result = static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
  69623. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
  69624. }
  69625. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
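// NOTE (annotation, not part of the generated header): waitSemaphores is the Vulkan 1.2 timeline
// semaphore wait; waitSemaphoresKHR below is the VK_KHR_timeline_semaphore alias. Sketch, assuming
// a timeline semaphore `sem` and a target counter value:
//
//   uint64_t waitValue = 2;
//   vk::SemaphoreWaitInfo waitInfo;
//   waitInfo.semaphoreCount = 1;
//   waitInfo.pSemaphores    = &sem;
//   waitInfo.pValues        = &waitValue;
//   vk::Result r = device.waitSemaphores( waitInfo, UINT64_MAX );   // eSuccess or eTimeout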
  69626. template <typename Dispatch>
  69627. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo* pWaitInfo, uint64_t timeout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69628. {
  69629. return static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
  69630. }
  69631. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69632. template <typename Dispatch>
  69633. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphoresKHR( const SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
  69634. {
  69635. Result result = static_cast<Result>( d.vkWaitSemaphoresKHR( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ) );
  69636. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphoresKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
  69637. }
  69638. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69639. template <typename Dispatch>
  69640. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::writeAccelerationStructuresPropertiesKHR( uint32_t accelerationStructureCount, const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR* pAccelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, void* pData, size_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69641. {
  69642. return static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, accelerationStructureCount, reinterpret_cast<const VkAccelerationStructureKHR *>( pAccelerationStructures ), static_cast<VkQueryType>( queryType ), dataSize, pData, stride ) );
  69643. }
  69644. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69645. template <typename T, typename Dispatch>
69646. VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it." )
69647. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, ArrayProxy<T> const & data, size_t stride, Dispatch const & d ) const
69648. {
69649. Result result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, accelerationStructures.size(), reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), data.size() * sizeof( T ), reinterpret_cast<void *>( data.data() ), stride ) );
69650. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
  69651. }
  69652. template <typename T, typename Allocator, typename Dispatch>
  69653. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<std::vector<T,Allocator>>::type Device::writeAccelerationStructuresPropertiesKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t dataSize, size_t stride, Dispatch const & d ) const
  69654. {
  69655. VULKAN_HPP_ASSERT( dataSize % sizeof( T ) == 0 );
  69656. std::vector<T,Allocator> data( dataSize / sizeof( T ) );
  69657. Result result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, accelerationStructures.size(), reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), data.size() * sizeof( T ), reinterpret_cast<void *>( data.data() ), stride ) );
  69658. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertiesKHR" );
  69659. }
  69660. template <typename T, typename Dispatch>
  69661. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<T>::type Device::writeAccelerationStructuresPropertyKHR( ArrayProxy<const VULKAN_HPP_NAMESPACE::AccelerationStructureKHR> const & accelerationStructures, VULKAN_HPP_NAMESPACE::QueryType queryType, size_t stride, Dispatch const & d ) const
  69662. {
  69663. T data;
  69664. Result result = static_cast<Result>( d.vkWriteAccelerationStructuresPropertiesKHR( m_device, accelerationStructures.size(), reinterpret_cast<const VkAccelerationStructureKHR *>( accelerationStructures.data() ), static_cast<VkQueryType>( queryType ), sizeof( T ), reinterpret_cast<void *>( &data ), stride ) );
  69665. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING "::Device::writeAccelerationStructuresPropertyKHR" );
  69666. }
  69667. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
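// NOTE (annotation, not part of the generated header): a sketch of the vector flavour above,
// querying compacted sizes on the host. Per the spec this requires the
// accelerationStructureHostCommands feature and acceleration structures built with the
// eAllowCompaction flag; `device` and a `accelerationStructures` container are assumed to exist:
//
//   std::vector<uint64_t> compactedSizes = device.writeAccelerationStructuresPropertiesKHR<uint64_t>(
//     accelerationStructures, vk::QueryType::eAccelerationStructureCompactedSizeKHR,
//     accelerationStructures.size() * sizeof( uint64_t ), sizeof( uint64_t ) );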
  69668. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  69669. template <typename Dispatch>
  69670. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69671. {
  69672. return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
  69673. }
  69674. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69675. template <typename Dispatch>
  69676. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createAndroidSurfaceKHR( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69677. {
  69678. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69679. Result result = static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69680. return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" );
  69681. }
  69682. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  69683. template <typename Dispatch>
  69684. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createAndroidSurfaceKHRUnique( const AndroidSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69685. {
  69686. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69687. Result result = static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69688. ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
  69689. return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique", deleter );
  69690. }
  69691. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  69692. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69693. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  69694. template <typename Dispatch>
  69695. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT* pCallback, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69696. {
  69697. return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDebugReportCallbackEXT *>( pCallback ) ) );
  69698. }
  69699. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69700. template <typename Dispatch>
  69701. VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type Instance::createDebugReportCallbackEXT( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69702. {
  69703. VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
  69704. Result result = static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) );
  69705. return createResultValue( result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" );
  69706. }
  69707. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  69708. template <typename Dispatch>
  69709. VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type Instance::createDebugReportCallbackEXTUnique( const DebugReportCallbackCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69710. {
  69711. VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback;
  69712. Result result = static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ) );
  69713. ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
  69714. return createResultValue<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>( result, callback, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique", deleter );
  69715. }
  69716. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  69717. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69718. template <typename Dispatch>
  69719. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDebugUtilsMessengerEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT* pMessenger, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69720. {
  69721. return static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDebugUtilsMessengerEXT *>( pMessenger ) ) );
  69722. }
  69723. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69724. template <typename Dispatch>
  69725. VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::type Instance::createDebugUtilsMessengerEXT( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69726. {
  69727. VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
  69728. Result result = static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) );
  69729. return createResultValue( result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXT" );
  69730. }
  69731. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  69732. template <typename Dispatch>
  69733. VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>>::type Instance::createDebugUtilsMessengerEXTUnique( const DebugUtilsMessengerCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69734. {
  69735. VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger;
  69736. Result result = static_cast<Result>( d.vkCreateDebugUtilsMessengerEXT( m_instance, reinterpret_cast<const VkDebugUtilsMessengerCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDebugUtilsMessengerEXT *>( &messenger ) ) );
  69737. ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
  69738. return createResultValue<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT, Dispatch>( result, messenger, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugUtilsMessengerEXTUnique", deleter );
  69739. }
  69740. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  69741. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
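// NOTE (annotation, not part of the generated header): sketch of creating a debug messenger with
// the unique-handle flavour above. Assumes VK_EXT_debug_utils was enabled on the instance, the
// dispatcher can resolve vkCreateDebugUtilsMessengerEXT, and `myDebugCallback` is a user-supplied
// PFN_vkDebugUtilsMessengerCallbackEXT:
//
//   vk::DebugUtilsMessengerCreateInfoEXT createInfo;
//   createInfo.messageSeverity = vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
//                                vk::DebugUtilsMessageSeverityFlagBitsEXT::eError;
//   createInfo.messageType     = vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral |
//                                vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation;
//   createInfo.pfnUserCallback = &myDebugCallback;
//   auto messenger = instance.createDebugUtilsMessengerEXTUnique( createInfo );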
  69742. #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
  69743. template <typename Dispatch>
  69744. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDirectFBSurfaceEXT( const VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69745. {
  69746. return static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance, reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
  69747. }
  69748. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69749. template <typename Dispatch>
  69750. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createDirectFBSurfaceEXT( const DirectFBSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69751. {
  69752. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69753. Result result = static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance, reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69754. return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXT" );
  69755. }
  69756. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  69757. template <typename Dispatch>
  69758. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createDirectFBSurfaceEXTUnique( const DirectFBSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69759. {
  69760. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69761. Result result = static_cast<Result>( d.vkCreateDirectFBSurfaceEXT( m_instance, reinterpret_cast<const VkDirectFBSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69762. ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
  69763. return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDirectFBSurfaceEXTUnique", deleter );
  69764. }
  69765. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  69766. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69767. #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
  69768. template <typename Dispatch>
  69769. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69770. {
  69771. return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
  69772. }
  69773. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69774. template <typename Dispatch>
  69775. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createDisplayPlaneSurfaceKHR( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69776. {
  69777. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69778. Result result = static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69779. return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" );
  69780. }
  69781. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  69782. template <typename Dispatch>
  69783. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createDisplayPlaneSurfaceKHRUnique( const DisplaySurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69784. {
  69785. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69786. Result result = static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69787. ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
  69788. return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique", deleter );
  69789. }
  69790. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  69791. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69792. template <typename Dispatch>
  69793. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createHeadlessSurfaceEXT( const VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69794. {
  69795. return static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
  69796. }
  69797. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69798. template <typename Dispatch>
  69799. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createHeadlessSurfaceEXT( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69800. {
  69801. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69802. Result result = static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69803. return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXT" );
  69804. }
  69805. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  69806. template <typename Dispatch>
  69807. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createHeadlessSurfaceEXTUnique( const HeadlessSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69808. {
  69809. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69810. Result result = static_cast<Result>( d.vkCreateHeadlessSurfaceEXT( m_instance, reinterpret_cast<const VkHeadlessSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69811. ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
  69812. return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createHeadlessSurfaceEXTUnique", deleter );
  69813. }
  69814. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  69815. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
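// NOTE (annotation, not part of the generated header): a headless surface needs no window-system
// handles, which makes it useful for off-screen or CI rendering. Sketch, assuming
// VK_EXT_headless_surface is enabled on the instance:
//
//   vk::HeadlessSurfaceCreateInfoEXT createInfo;
//   vk::SurfaceKHR surface = instance.createHeadlessSurfaceEXT( createInfo );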
  69816. #ifdef VK_USE_PLATFORM_IOS_MVK
  69817. template <typename Dispatch>
  69818. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createIOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69819. {
  69820. return static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
  69821. }
  69822. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69823. template <typename Dispatch>
  69824. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createIOSSurfaceMVK( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69825. {
  69826. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69827. Result result = static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69828. return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVK" );
  69829. }
  69830. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  69831. template <typename Dispatch>
  69832. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createIOSSurfaceMVKUnique( const IOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69833. {
  69834. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69835. Result result = static_cast<Result>( d.vkCreateIOSSurfaceMVK( m_instance, reinterpret_cast<const VkIOSSurfaceCreateInfoMVK *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69836. ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
  69837. return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createIOSSurfaceMVKUnique", deleter );
  69838. }
  69839. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  69840. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69841. #endif /*VK_USE_PLATFORM_IOS_MVK*/
  69842. #ifdef VK_USE_PLATFORM_FUCHSIA
  69843. template <typename Dispatch>
  69844. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createImagePipeSurfaceFUCHSIA( const VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69845. {
  69846. return static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
  69847. }
  69848. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69849. template <typename Dispatch>
  69850. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createImagePipeSurfaceFUCHSIA( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69851. {
  69852. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69853. Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69854. return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIA" );
  69855. }
  69856. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  69857. template <typename Dispatch>
  69858. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createImagePipeSurfaceFUCHSIAUnique( const ImagePipeSurfaceCreateInfoFUCHSIA & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69859. {
  69860. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69861. Result result = static_cast<Result>( d.vkCreateImagePipeSurfaceFUCHSIA( m_instance, reinterpret_cast<const VkImagePipeSurfaceCreateInfoFUCHSIA *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69862. ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
  69863. return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createImagePipeSurfaceFUCHSIAUnique", deleter );
  69864. }
  69865. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  69866. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69867. #endif /*VK_USE_PLATFORM_FUCHSIA*/
  69868. #ifdef VK_USE_PLATFORM_MACOS_MVK
  69869. template <typename Dispatch>
  69870. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMacOSSurfaceMVK( const VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69871. {
  69872. return static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
  69873. }
  69874. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69875. template <typename Dispatch>
  69876. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createMacOSSurfaceMVK( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69877. {
  69878. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69879. Result result = static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69880. return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVK" );
  69881. }
  69882. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  69883. template <typename Dispatch>
  69884. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createMacOSSurfaceMVKUnique( const MacOSSurfaceCreateInfoMVK & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69885. {
  69886. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69887. Result result = static_cast<Result>( d.vkCreateMacOSSurfaceMVK( m_instance, reinterpret_cast<const VkMacOSSurfaceCreateInfoMVK *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69888. ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
  69889. return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMacOSSurfaceMVKUnique", deleter );
  69890. }
  69891. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  69892. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69893. #endif /*VK_USE_PLATFORM_MACOS_MVK*/
  69894. #ifdef VK_USE_PLATFORM_METAL_EXT
  69895. template <typename Dispatch>
  69896. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createMetalSurfaceEXT( const VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69897. {
  69898. return static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
  69899. }
  69900. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69901. template <typename Dispatch>
  69902. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createMetalSurfaceEXT( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69903. {
  69904. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69905. Result result = static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69906. return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXT" );
  69907. }
  69908. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  69909. template <typename Dispatch>
  69910. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createMetalSurfaceEXTUnique( const MetalSurfaceCreateInfoEXT & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69911. {
  69912. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69913. Result result = static_cast<Result>( d.vkCreateMetalSurfaceEXT( m_instance, reinterpret_cast<const VkMetalSurfaceCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69914. ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
  69915. return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createMetalSurfaceEXTUnique", deleter );
  69916. }
  69917. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  69918. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69919. #endif /*VK_USE_PLATFORM_METAL_EXT*/
  69920. #ifdef VK_USE_PLATFORM_GGP
  69921. template <typename Dispatch>
  69922. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69923. {
  69924. return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
  69925. }
  69926. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69927. template <typename Dispatch>
  69928. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createStreamDescriptorSurfaceGGP( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69929. {
  69930. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69931. Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69932. return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" );
  69933. }
  69934. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  69935. template <typename Dispatch>
  69936. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createStreamDescriptorSurfaceGGPUnique( const StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69937. {
  69938. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69939. Result result = static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69940. ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
  69941. return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique", deleter );
  69942. }
  69943. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  69944. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69945. #endif /*VK_USE_PLATFORM_GGP*/
  69946. #ifdef VK_USE_PLATFORM_VI_NN
  69947. template <typename Dispatch>
  69948. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69949. {
  69950. return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
  69951. }
  69952. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69953. template <typename Dispatch>
  69954. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createViSurfaceNN( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69955. {
  69956. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69957. Result result = static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69958. return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" );
  69959. }
  69960. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  69961. template <typename Dispatch>
  69962. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createViSurfaceNNUnique( const ViSurfaceCreateInfoNN & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69963. {
  69964. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69965. Result result = static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69966. ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
  69967. return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique", deleter );
  69968. }
  69969. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  69970. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69971. #endif /*VK_USE_PLATFORM_VI_NN*/
  69972. #ifdef VK_USE_PLATFORM_WAYLAND_KHR
  69973. template <typename Dispatch>
  69974. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  69975. {
  69976. return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
  69977. }
  69978. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  69979. template <typename Dispatch>
  69980. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createWaylandSurfaceKHR( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69981. {
  69982. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69983. Result result = static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69984. return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" );
  69985. }
  69986. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  69987. template <typename Dispatch>
  69988. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createWaylandSurfaceKHRUnique( const WaylandSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  69989. {
  69990. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  69991. Result result = static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  69992. ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
  69993. return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique", deleter );
  69994. }
  69995. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  69996. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  69997. #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
  69998. #ifdef VK_USE_PLATFORM_WIN32_KHR
  69999. template <typename Dispatch>
  70000. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70001. {
  70002. return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
  70003. }
  70004. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70005. template <typename Dispatch>
  70006. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createWin32SurfaceKHR( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  70007. {
  70008. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  70009. Result result = static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  70010. return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" );
  70011. }
  70012. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  70013. template <typename Dispatch>
  70014. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createWin32SurfaceKHRUnique( const Win32SurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  70015. {
  70016. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  70017. Result result = static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  70018. ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
  70019. return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique", deleter );
  70020. }
  70021. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  70022. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  70023. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
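// NOTE (annotation, not part of the generated header): the platform surface constructors above all
// follow the same pattern; for example on Windows (VK_KHR_win32_surface enabled, `hInstance` and
// `hWnd` obtained from the application):
//
//   vk::Win32SurfaceCreateInfoKHR createInfo;
//   createInfo.hinstance = hInstance;
//   createInfo.hwnd      = hWnd;
//   vk::UniqueSurfaceKHR surface = instance.createWin32SurfaceKHRUnique( createInfo );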
  70024. #ifdef VK_USE_PLATFORM_XCB_KHR
  70025. template <typename Dispatch>
  70026. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70027. {
  70028. return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
  70029. }
  70030. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70031. template <typename Dispatch>
  70032. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createXcbSurfaceKHR( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  70033. {
  70034. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  70035. Result result = static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  70036. return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" );
  70037. }
  70038. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  70039. template <typename Dispatch>
  70040. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createXcbSurfaceKHRUnique( const XcbSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  70041. {
  70042. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  70043. Result result = static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  70044. ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
  70045. return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique", deleter );
  70046. }
  70047. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  70048. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  70049. #endif /*VK_USE_PLATFORM_XCB_KHR*/
  70050. #ifdef VK_USE_PLATFORM_XLIB_KHR
  70051. template <typename Dispatch>
  70052. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR* pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70053. {
  70054. return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkSurfaceKHR *>( pSurface ) ) );
  70055. }
  70056. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70057. template <typename Dispatch>
  70058. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createXlibSurfaceKHR( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  70059. {
  70060. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  70061. Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  70062. return createResultValue( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" );
  70063. }
  70064. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  70065. template <typename Dispatch>
  70066. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createXlibSurfaceKHRUnique( const XlibSurfaceCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  70067. {
  70068. VULKAN_HPP_NAMESPACE::SurfaceKHR surface;
  70069. Result result = static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ) );
  70070. ObjectDestroy<Instance, Dispatch> deleter( *this, allocator, d );
  70071. return createResultValue<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( result, surface, VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique", deleter );
  70072. }
  70073. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  70074. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  70075. #endif /*VK_USE_PLATFORM_XLIB_KHR*/
  70076. template <typename Dispatch>
  70077. VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70078. {
  70079. d.vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, pLayerPrefix, pMessage );
  70080. }
  70081. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70082. template <typename Dispatch>
  70083. VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70084. {
  70085. d.vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, layerPrefix.c_str(), message.c_str() );
  70086. }
  70087. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  70088. template <typename Dispatch>
  70089. VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70090. {
  70091. d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  70092. }
  70093. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70094. template <typename Dispatch>
  70095. VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70096. {
  70097. d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  70098. }
  70099. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  70100. template <typename Dispatch>
  70101. VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70102. {
  70103. d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  70104. }
  70105. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70106. template <typename Dispatch>
  70107. VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70108. {
  70109. d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  70110. }
  70111. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
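// NOTE (annotation, not part of the generated header): these unqualified destroy() overloads mirror
// the named destroyDebugReportCallbackEXT / destroyDebugUtilsMessengerEXT / destroySurfaceKHR
// functions; they are what the ObjectDestroy deleters of the corresponding UniqueHandle types
// invoke, so when the ...Unique creation flavours are used there is normally no need to call them
// directly.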
  70112. template <typename Dispatch>
  70113. VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70114. {
  70115. d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  70116. }
  70117. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70118. template <typename Dispatch>
  70119. VULKAN_HPP_INLINE void Instance::destroyDebugUtilsMessengerEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70120. {
  70121. d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  70122. }
  70123. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  70124. template <typename Dispatch>
  70125. VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70126. {
  70127. d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  70128. }
  70129. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70130. template <typename Dispatch>
  70131. VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT messenger, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70132. {
  70133. d.vkDestroyDebugUtilsMessengerEXT( m_instance, static_cast<VkDebugUtilsMessengerEXT>( messenger ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  70134. }
  70135. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  70136. template <typename Dispatch>
  70137. VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70138. {
  70139. d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
  70140. }
  70141. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70142. template <typename Dispatch>
  70143. VULKAN_HPP_INLINE void Instance::destroy( Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70144. {
  70145. d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
  70146. }
  70147. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
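// Usage sketch (editorial illustration): a VkSurfaceKHR created through a windowing helper
// (GLFW, SDL, or a platform-specific create*SurfaceKHR call) is owned by the instance and is
// destroyed through it, before the instance itself:
//   instance.destroySurfaceKHR( surface );   // or instance.destroy( surface );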
  70172. template <typename Dispatch>
  70173. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70174. {
  70175. return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast< VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
  70176. }
  70177. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70178. template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
  70179. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const
  70180. {
  70181. std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
  70182. uint32_t physicalDeviceGroupCount;
  70183. Result result;
  70184. do
  70185. {
  70186. result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
  70187. if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
  70188. {
  70189. physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
  70190. result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
  70191. VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
  70192. }
  70193. } while ( result == Result::eIncomplete );
  70194. if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) )
  70195. {
  70196. physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
  70197. }
  70198. return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" );
  70199. }
  70200. template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type >
  70201. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type Instance::enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
  70202. {
  70203. std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties( physicalDeviceGroupPropertiesAllocator );
  70204. uint32_t physicalDeviceGroupCount;
  70205. Result result;
  70206. do
  70207. {
  70208. result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ) );
  70209. if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
  70210. {
  70211. physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
  70212. result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
  70213. VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
  70214. }
  70215. } while ( result == Result::eIncomplete );
  70216. if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) )
  70217. {
  70218. physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
  70219. }
  70220. return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroups" );
  70221. }
  70222. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
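// Usage sketch (editorial illustration): with exceptions enabled the enhanced overload returns the
// vector directly and hides the count/resize/VK_INCOMPLETE retry loop shown above.
//   std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroups();
//   for ( vk::PhysicalDeviceGroupProperties const & g : groups ) { /* g.physicalDeviceCount devices per group */ }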
  70223. template <typename Dispatch>
  70224. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t* pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70225. {
  70226. return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast< VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
  70227. }
  70228. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70229. template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
  70230. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const
  70231. {
  70232. std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
  70233. uint32_t physicalDeviceGroupCount;
  70234. Result result;
  70235. do
  70236. {
  70237. result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
  70238. if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
  70239. {
  70240. physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
  70241. result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
  70242. VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
  70243. }
  70244. } while ( result == Result::eIncomplete );
  70245. if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) )
  70246. {
  70247. physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
  70248. }
  70249. return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" );
  70250. }
  70251. template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceGroupProperties>::value, int>::type >
  70252. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type Instance::enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
  70253. {
  70254. std::vector<PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties( physicalDeviceGroupPropertiesAllocator );
  70255. uint32_t physicalDeviceGroupCount;
  70256. Result result;
  70257. do
  70258. {
  70259. result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ) );
  70260. if ( ( result == Result::eSuccess ) && physicalDeviceGroupCount )
  70261. {
  70262. physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
  70263. result = static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ) );
  70264. VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
  70265. }
  70266. } while ( result == Result::eIncomplete );
  70267. if ( ( result == Result::eSuccess ) && ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) )
  70268. {
  70269. physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
  70270. }
  70271. return createResultValue( result, physicalDeviceGroupProperties, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDeviceGroupsKHR" );
  70272. }
  70273. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  70274. template <typename Dispatch>
  70275. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t* pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice* pPhysicalDevices, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70276. {
  70277. return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast< VkPhysicalDevice *>( pPhysicalDevices ) ) );
  70278. }
  70279. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70280. template <typename PhysicalDeviceAllocator, typename Dispatch>
  70281. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type Instance::enumeratePhysicalDevices( Dispatch const & d ) const
  70282. {
  70283. std::vector<PhysicalDevice, PhysicalDeviceAllocator> physicalDevices;
  70284. uint32_t physicalDeviceCount;
  70285. Result result;
  70286. do
  70287. {
  70288. result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
  70289. if ( ( result == Result::eSuccess ) && physicalDeviceCount )
  70290. {
  70291. physicalDevices.resize( physicalDeviceCount );
  70292. result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) );
  70293. VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
  70294. }
  70295. } while ( result == Result::eIncomplete );
  70296. if ( ( result == Result::eSuccess ) && ( physicalDeviceCount < physicalDevices.size() ) )
  70297. {
  70298. physicalDevices.resize( physicalDeviceCount );
  70299. }
  70300. return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" );
  70301. }
  70302. template <typename PhysicalDeviceAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDevice>::value, int>::type >
  70303. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDevice, PhysicalDeviceAllocator>>::type Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const
  70304. {
  70305. std::vector<PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator );
  70306. uint32_t physicalDeviceCount;
  70307. Result result;
  70308. do
  70309. {
  70310. result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ) );
  70311. if ( ( result == Result::eSuccess ) && physicalDeviceCount )
  70312. {
  70313. physicalDevices.resize( physicalDeviceCount );
  70314. result = static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ) );
  70315. VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
  70316. }
  70317. } while ( result == Result::eIncomplete );
  70318. if ( ( result == Result::eSuccess ) && ( physicalDeviceCount < physicalDevices.size() ) )
  70319. {
  70320. physicalDevices.resize( physicalDeviceCount );
  70321. }
  70322. return createResultValue( result, physicalDevices, VULKAN_HPP_NAMESPACE_STRING"::Instance::enumeratePhysicalDevices" );
  70323. }
  70324. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
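// Usage sketch (editorial illustration): enumerating adapters and picking one; a real application
// would rank devices by queue families, extension support and limits rather than taking the first.
//   std::vector<vk::PhysicalDevice> physicalDevices = instance.enumeratePhysicalDevices();
//   if ( physicalDevices.empty() ) { /* no Vulkan-capable device */ }
//   vk::PhysicalDevice gpu = physicalDevices.front();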
template <typename Dispatch>
VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char* pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return d.vkGetInstanceProcAddr( m_instance, pName );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return d.vkGetInstanceProcAddr( m_instance, name.c_str() );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
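// Usage sketch (editorial illustration): fetching an instance-level entry point by name; the
// returned PFN_vkVoidFunction must be cast to the matching function pointer type by the caller.
//   auto pfnCreateMessenger = reinterpret_cast<PFN_vkCreateDebugUtilsMessengerEXT>(
//     instance.getProcAddr( "vkCreateDebugUtilsMessengerEXT" ) );
//   if ( !pfnCreateMessenger ) { /* VK_EXT_debug_utils not available */ }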
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT* pCallbackData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( pCallbackData ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_INLINE void Instance::submitDebugUtilsMessageEXT( VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT messageTypes, const DebugUtilsMessengerCallbackDataEXT & callbackData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
d.vkSubmitDebugUtilsMessageEXT( m_instance, static_cast<VkDebugUtilsMessageSeverityFlagBitsEXT>( messageSeverity ), static_cast<VkDebugUtilsMessageTypeFlagsEXT>( messageTypes ), reinterpret_cast<const VkDebugUtilsMessengerCallbackDataEXT *>( &callbackData ) );
}
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
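// Usage sketch (editorial illustration): injecting an application message into the debug utils
// stream, e.g. to mark a frame boundary; requires VK_EXT_debug_utils and an active messenger.
//   vk::DebugUtilsMessengerCallbackDataEXT callbackData;
//   callbackData.pMessageIdName = "app";
//   callbackData.pMessage       = "frame begin";
//   instance.submitDebugUtilsMessageEXT( vk::DebugUtilsMessageSeverityFlagBitsEXT::eInfo,
//                                        vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral, callbackData );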
  70349. #ifdef VK_USE_PLATFORM_WIN32_KHR
  70350. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70351. template <typename Dispatch>
  70352. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70353. {
  70354. return static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
  70355. }
  70356. #else
  70357. template <typename Dispatch>
  70358. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type PhysicalDevice::acquireWinrtDisplayNV( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  70359. {
  70360. Result result = static_cast<Result>( d.vkAcquireWinrtDisplayNV( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
  70361. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireWinrtDisplayNV" );
  70362. }
  70363. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  70364. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  70365. #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
  70366. template <typename Dispatch>
  70367. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display* dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70368. {
  70369. return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) );
  70370. }
  70371. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70372. template <typename Dispatch>
  70373. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  70374. {
  70375. Result result = static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ) );
  70376. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" );
  70377. }
  70378. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  70379. #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
template <typename Dispatch>
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::Device* pDevice, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
{
return static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDevice *>( pDevice ) ) );
}
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type PhysicalDevice::createDevice( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::Device device;
Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDevice *>( &device ) ) );
return createResultValue( result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" );
}
# ifndef VULKAN_HPP_NO_SMART_HANDLE
template <typename Dispatch>
VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type PhysicalDevice::createDeviceUnique( const DeviceCreateInfo & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
{
VULKAN_HPP_NAMESPACE::Device device;
Result result = static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDevice *>( &device ) ) );
ObjectDestroy<NoParent, Dispatch> deleter( allocator, d );
return createResultValue<VULKAN_HPP_NAMESPACE::Device, Dispatch>( result, device, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique", deleter );
}
# endif /*VULKAN_HPP_NO_SMART_HANDLE*/
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
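// Usage sketch (editorial illustration): creating a logical device with one graphics queue;
// `graphicsQueueFamilyIndex` is assumed to have been selected via getQueueFamilyProperties, and no
// layers, extensions or features are enabled here.
//   float priority = 1.0f;
//   vk::DeviceQueueCreateInfo queueInfo( {}, graphicsQueueFamilyIndex, 1, &priority );
//   vk::DeviceCreateInfo      createInfo( {}, 1, &queueInfo );
//   vk::Device device = gpu.createDevice( createInfo );
//   // or, with RAII cleanup: vk::UniqueDevice uniqueDevice = gpu.createDeviceUnique( createInfo );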
  70404. template <typename Dispatch>
  70405. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR* pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks* pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR* pMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70406. {
  70407. return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast< VkDisplayModeKHR *>( pMode ) ) );
  70408. }
  70409. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70410. template <typename Dispatch>
  70411. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  70412. {
  70413. VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
  70414. Result result = static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) );
  70415. return createResultValue( result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" );
  70416. }
  70417. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  70418. template <typename Dispatch>
  70419. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const DisplayModeCreateInfoKHR & createInfo, Optional<const AllocationCallbacks> allocator, Dispatch const & d ) const
  70420. {
  70421. VULKAN_HPP_NAMESPACE::DisplayModeKHR mode;
  70422. Result result = static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks*>( allocator ) ), reinterpret_cast<VkDisplayModeKHR *>( &mode ) ) );
  70423. ObjectDestroy<PhysicalDevice, Dispatch> deleter( *this, allocator, d );
  70424. return createResultValue<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>( result, mode, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique", deleter );
  70425. }
  70426. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  70427. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  70428. template <typename Dispatch>
  70429. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties* pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70430. {
  70431. return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast< VkExtensionProperties *>( pProperties ) ) );
  70432. }
  70433. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70434. template <typename ExtensionPropertiesAllocator, typename Dispatch>
  70435. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) const
  70436. {
  70437. std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties;
  70438. uint32_t propertyCount;
  70439. Result result;
  70440. do
  70441. {
  70442. result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
  70443. if ( ( result == Result::eSuccess ) && propertyCount )
  70444. {
  70445. properties.resize( propertyCount );
  70446. result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
  70447. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  70448. }
  70449. } while ( result == Result::eIncomplete );
  70450. if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
  70451. {
  70452. properties.resize( propertyCount );
  70453. }
  70454. return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" );
  70455. }
  70456. template <typename ExtensionPropertiesAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, ExtensionProperties>::value, int>::type >
  70457. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<ExtensionProperties, ExtensionPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, ExtensionPropertiesAllocator & extensionPropertiesAllocator, Dispatch const & d ) const
  70458. {
  70459. std::vector<ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
  70460. uint32_t propertyCount;
  70461. Result result;
  70462. do
  70463. {
  70464. result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ) );
  70465. if ( ( result == Result::eSuccess ) && propertyCount )
  70466. {
  70467. properties.resize( propertyCount );
  70468. result = static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ) );
  70469. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  70470. }
  70471. } while ( result == Result::eIncomplete );
  70472. if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
  70473. {
  70474. properties.resize( propertyCount );
  70475. }
  70476. return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceExtensionProperties" );
  70477. }
  70478. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
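// Usage sketch (editorial illustration): checking for swapchain support before enabling the
// extension at device creation time (layerName defaults to nullptr, i.e. implementation extensions).
//   for ( vk::ExtensionProperties const & e : gpu.enumerateDeviceExtensionProperties() )
//     if ( std::string( e.extensionName ) == VK_KHR_SWAPCHAIN_EXTENSION_NAME ) { /* enable it */ }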
  70479. template <typename Dispatch>
  70480. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties* pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70481. {
  70482. return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast< VkLayerProperties *>( pProperties ) ) );
  70483. }
  70484. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70485. template <typename LayerPropertiesAllocator, typename Dispatch>
  70486. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const
  70487. {
  70488. std::vector<LayerProperties, LayerPropertiesAllocator> properties;
  70489. uint32_t propertyCount;
  70490. Result result;
  70491. do
  70492. {
  70493. result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
  70494. if ( ( result == Result::eSuccess ) && propertyCount )
  70495. {
  70496. properties.resize( propertyCount );
  70497. result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
  70498. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  70499. }
  70500. } while ( result == Result::eIncomplete );
  70501. if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
  70502. {
  70503. properties.resize( propertyCount );
  70504. }
  70505. return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" );
  70506. }
  70507. template <typename LayerPropertiesAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, LayerProperties>::value, int>::type >
  70508. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<LayerProperties, LayerPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) const
  70509. {
  70510. std::vector<LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
  70511. uint32_t propertyCount;
  70512. Result result;
  70513. do
  70514. {
  70515. result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ) );
  70516. if ( ( result == Result::eSuccess ) && propertyCount )
  70517. {
  70518. properties.resize( propertyCount );
  70519. result = static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ) );
  70520. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  70521. }
  70522. } while ( result == Result::eIncomplete );
  70523. if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
  70524. {
  70525. properties.resize( propertyCount );
  70526. }
  70527. return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateDeviceLayerProperties" );
  70528. }
  70529. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
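// Note (editorial): device-specific layers are deprecated in current Vulkan; applications normally
// enumerate and enable layers only at instance level and treat this query as informational.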
  70530. template <typename Dispatch>
  70531. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t* pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR* pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR* pCounterDescriptions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70532. {
  70533. return static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, pCounterCount, reinterpret_cast< VkPerformanceCounterKHR *>( pCounters ), reinterpret_cast< VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) );
  70534. }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
template <typename Allocator, typename Dispatch>
VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const &counters, Dispatch const &d ) const
{
std::vector<PerformanceCounterDescriptionKHR,Allocator> counterDescriptions;
uint32_t counterCount;
Result result;
do
{
// counterCount carries the capacity of the caller-provided counters buffer into the call
counterCount = counters.size();
result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), nullptr ) );
if ( ( result == Result::eSuccess ) && counterCount )
{
counterDescriptions.resize( counterCount );
result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( counterDescriptions.data() ) ) );
}
} while ( result == Result::eIncomplete );
if ( result == Result::eSuccess )
{
VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() );
counterDescriptions.resize( counterCount );
}
return createResultValue( result, counterDescriptions, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
}
template <typename Allocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PerformanceCounterDescriptionKHR>::value, int>::type>
VULKAN_HPP_DEPRECATED( "This function is deprecated. Use one of the other flavours of it.")
VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PerformanceCounterDescriptionKHR,Allocator>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, ArrayProxy<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR> const &counters, Allocator const& vectorAllocator, Dispatch const &d ) const
{
std::vector<PerformanceCounterDescriptionKHR,Allocator> counterDescriptions( vectorAllocator );
uint32_t counterCount;
Result result;
do
{
// as above, seed the count from the caller-provided counters buffer before each query
counterCount = counters.size();
result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), nullptr ) );
if ( ( result == Result::eSuccess ) && counterCount )
{
counterDescriptions.resize( counterCount );
result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR*>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR*>( counterDescriptions.data() ) ) );
}
} while ( result == Result::eIncomplete );
if ( result == Result::eSuccess )
{
VULKAN_HPP_ASSERT( counterCount <= counterDescriptions.size() );
counterDescriptions.resize( counterCount );
}
return createResultValue( result, counterDescriptions, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
}
  70582. template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch>
  70583. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const
  70584. {
  70585. std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> data;
  70586. std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first;
  70587. std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data.second;
  70588. uint32_t counterCount;
  70589. Result result;
  70590. do
  70591. {
  70592. result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) );
  70593. if ( ( result == Result::eSuccess ) && counterCount )
  70594. {
  70595. counters.resize( counterCount );
  70596. counterDescriptions.resize( counterCount );
  70597. result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
  70598. VULKAN_HPP_ASSERT( counterCount <= counters.size() );
  70599. }
  70600. } while ( result == Result::eIncomplete );
  70601. if ( ( result == Result::eSuccess ) && ( counterCount < counters.size() ) )
  70602. {
  70603. counters.resize( counterCount );
  70604. counterDescriptions.resize( counterCount );
  70605. }
  70606. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
  70607. }
  70608. template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch, typename B1, typename B2, typename std::enable_if<std::is_same<typename B1::value_type, PerformanceCounterKHR>::value && std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value, int>::type >
  70609. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, Dispatch const & d ) const
  70610. {
  70611. std::pair<std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> data( std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) );
  70612. std::vector<PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first;
  70613. std::vector<PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data.second;
  70614. uint32_t counterCount;
  70615. Result result;
  70616. do
  70617. {
  70618. result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ) );
  70619. if ( ( result == Result::eSuccess ) && counterCount )
  70620. {
  70621. counters.resize( counterCount );
  70622. counterDescriptions.resize( counterCount );
  70623. result = static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ) );
  70624. VULKAN_HPP_ASSERT( counterCount <= counters.size() );
  70625. }
  70626. } while ( result == Result::eIncomplete );
  70627. if ( ( result == Result::eSuccess ) && ( counterCount < counters.size() ) )
  70628. {
  70629. counters.resize( counterCount );
  70630. counterDescriptions.resize( counterCount );
  70631. }
  70632. return createResultValue( result, data, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" );
  70633. }
  70634. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
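// Usage sketch (editorial illustration): querying the performance counters of a queue family as a
// pair of parallel vectors (VK_KHR_performance_query); `queueFamilyIndex` is assumed.
//   auto countersAndDescriptions = gpu.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex );
//   std::vector<vk::PerformanceCounterKHR> const &            counters     = countersAndDescriptions.first;
//   std::vector<vk::PerformanceCounterDescriptionKHR> const & descriptions = countersAndDescriptions.second;
//   // descriptions[i].name describes counters[i]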
  70635. template <typename Dispatch>
  70636. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR* pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70637. {
  70638. return static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast< VkDisplayModeProperties2KHR *>( pProperties ) ) );
  70639. }
  70640. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70641. template <typename DisplayModeProperties2KHRAllocator, typename Dispatch>
  70642. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  70643. {
  70644. std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties;
  70645. uint32_t propertyCount;
  70646. Result result;
  70647. do
  70648. {
  70649. result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
  70650. if ( ( result == Result::eSuccess ) && propertyCount )
  70651. {
  70652. properties.resize( propertyCount );
  70653. result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) );
  70654. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  70655. }
  70656. } while ( result == Result::eIncomplete );
  70657. if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
  70658. {
  70659. properties.resize( propertyCount );
  70660. }
  70661. return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModeProperties2KHR" );
  70662. }
  70663. template <typename DisplayModeProperties2KHRAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayModeProperties2KHR>::value, int>::type >
  70664. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator>>::type PhysicalDevice::getDisplayModeProperties2KHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, DisplayModeProperties2KHRAllocator & displayModeProperties2KHRAllocator, Dispatch const & d ) const
  70665. {
  70666. std::vector<DisplayModeProperties2KHR, DisplayModeProperties2KHRAllocator> properties( displayModeProperties2KHRAllocator );
  70667. uint32_t propertyCount;
  70668. Result result;
  70669. do
  70670. {
  70671. result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
  70672. if ( ( result == Result::eSuccess ) && propertyCount )
  70673. {
  70674. properties.resize( propertyCount );
  70675. result = static_cast<Result>( d.vkGetDisplayModeProperties2KHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModeProperties2KHR *>( properties.data() ) ) );
  70676. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  70677. }
  70678. } while ( result == Result::eIncomplete );
  70679. if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
  70680. {
  70681. properties.resize( propertyCount );
  70682. }
  70683. return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModeProperties2KHR" );
  70684. }
  70685. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  70686. template <typename Dispatch>
  70687. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR* pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70688. {
  70689. return static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast< VkDisplayModePropertiesKHR *>( pProperties ) ) );
  70690. }
  70691. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70692. template <typename DisplayModePropertiesKHRAllocator, typename Dispatch>
  70693. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  70694. {
  70695. std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties;
  70696. uint32_t propertyCount;
  70697. Result result;
  70698. do
  70699. {
  70700. result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
  70701. if ( ( result == Result::eSuccess ) && propertyCount )
  70702. {
  70703. properties.resize( propertyCount );
  70704. result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) );
  70705. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  70706. }
  70707. } while ( result == Result::eIncomplete );
  70708. if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
  70709. {
  70710. properties.resize( propertyCount );
  70711. }
  70712. return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" );
  70713. }
  70714. template <typename DisplayModePropertiesKHRAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayModePropertiesKHR>::value, int>::type >
  70715. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, Dispatch const & d ) const
  70716. {
  70717. std::vector<DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties( displayModePropertiesKHRAllocator );
  70718. uint32_t propertyCount;
  70719. Result result;
  70720. do
  70721. {
  70722. result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ) );
  70723. if ( ( result == Result::eSuccess ) && propertyCount )
  70724. {
  70725. properties.resize( propertyCount );
  70726. result = static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ) );
  70727. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  70728. }
  70729. } while ( result == Result::eIncomplete );
  70730. if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
  70731. {
  70732. properties.resize( propertyCount );
  70733. }
  70734. return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayModePropertiesKHR" );
  70735. }
  70736. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
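// Usage sketch (editorial illustration): listing the modes of a display obtained from
// getDisplayPropertiesKHR when rendering direct-to-display.
//   std::vector<vk::DisplayModePropertiesKHR> modes = gpu.getDisplayModePropertiesKHR( display );
//   // modes[i].parameters holds the visible region and refresh rate of each mode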
  70737. template <typename Dispatch>
  70738. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilities2KHR( const VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR* pDisplayPlaneInfo, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR* pCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70739. {
  70740. return static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( pDisplayPlaneInfo ), reinterpret_cast< VkDisplayPlaneCapabilities2KHR *>( pCapabilities ) ) );
  70741. }
  70742. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70743. template <typename Dispatch>
  70744. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::type PhysicalDevice::getDisplayPlaneCapabilities2KHR( const DisplayPlaneInfo2KHR & displayPlaneInfo, Dispatch const & d ) const
  70745. {
  70746. VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR capabilities;
  70747. Result result = static_cast<Result>( d.vkGetDisplayPlaneCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkDisplayPlaneInfo2KHR *>( &displayPlaneInfo ), reinterpret_cast<VkDisplayPlaneCapabilities2KHR *>( &capabilities ) ) );
  70748. return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilities2KHR" );
  70749. }
  70750. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  70751. template <typename Dispatch>
  70752. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR* pCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70753. {
  70754. return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast< VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) );
  70755. }
  70756. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70757. template <typename Dispatch>
  70758. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d ) const
  70759. {
  70760. VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities;
  70761. Result result = static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) ) );
  70762. return createResultValue( result, capabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" );
  70763. }
  70764. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
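// Usage sketch (editorial illustration): checking what a display plane supports for a chosen mode
// before filling in a DisplaySurfaceCreateInfoKHR; `mode` and `planeIndex` are assumed.
//   vk::DisplayPlaneCapabilitiesKHR caps = gpu.getDisplayPlaneCapabilitiesKHR( mode, planeIndex );
//   // caps.supportedAlpha and caps.maxDstExtent constrain the surface that can be created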
  70765. template <typename Dispatch>
  70766. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t* pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplays, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70767. {
  70768. return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast< VkDisplayKHR *>( pDisplays ) ) );
  70769. }
  70770. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70771. template <typename DisplayKHRAllocator, typename Dispatch>
  70772. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const
  70773. {
  70774. std::vector<DisplayKHR, DisplayKHRAllocator> displays;
  70775. uint32_t displayCount;
  70776. Result result;
  70777. do
  70778. {
  70779. result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
  70780. if ( ( result == Result::eSuccess ) && displayCount )
  70781. {
  70782. displays.resize( displayCount );
  70783. result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) );
  70784. VULKAN_HPP_ASSERT( displayCount <= displays.size() );
  70785. }
  70786. } while ( result == Result::eIncomplete );
  70787. if ( ( result == Result::eSuccess ) && ( displayCount < displays.size() ) )
  70788. {
  70789. displays.resize( displayCount );
  70790. }
  70791. return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
  70792. }
  70793. template <typename DisplayKHRAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayKHR>::value, int>::type >
  70794. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayKHR, DisplayKHRAllocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d ) const
  70795. {
  70796. std::vector<DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator );
  70797. uint32_t displayCount;
  70798. Result result;
  70799. do
  70800. {
  70801. result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ) );
  70802. if ( ( result == Result::eSuccess ) && displayCount )
  70803. {
  70804. displays.resize( displayCount );
  70805. result = static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ) );
  70806. VULKAN_HPP_ASSERT( displayCount <= displays.size() );
  70807. }
  70808. } while ( result == Result::eIncomplete );
  70809. if ( ( result == Result::eSuccess ) && ( displayCount < displays.size() ) )
  70810. {
  70811. displays.resize( displayCount );
  70812. }
  70813. return createResultValue( result, displays, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" );
  70814. }
  70815. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
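// Usage sketch (editorial illustration): finding which displays plane 0 can present to.
//   std::vector<vk::DisplayKHR> displays = gpu.getDisplayPlaneSupportedDisplaysKHR( 0 );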
  70816. template <typename Dispatch>
  70817. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCalibrateableTimeDomainsEXT( uint32_t* pTimeDomainCount, VULKAN_HPP_NAMESPACE::TimeDomainEXT* pTimeDomains, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  70818. {
  70819. return static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, pTimeDomainCount, reinterpret_cast< VkTimeDomainEXT *>( pTimeDomains ) ) );
  70820. }
  70821. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  70822. template <typename TimeDomainEXTAllocator, typename Dispatch>
  70823. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type PhysicalDevice::getCalibrateableTimeDomainsEXT( Dispatch const & d ) const
  70824. {
  70825. std::vector<TimeDomainEXT, TimeDomainEXTAllocator> timeDomains;
  70826. uint32_t timeDomainCount;
  70827. Result result;
  70828. do
  70829. {
  70830. result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
  70831. if ( ( result == Result::eSuccess ) && timeDomainCount )
  70832. {
  70833. timeDomains.resize( timeDomainCount );
  70834. result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) ) );
  70835. VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
  70836. }
  70837. } while ( result == Result::eIncomplete );
  70838. if ( ( result == Result::eSuccess ) && ( timeDomainCount < timeDomains.size() ) )
  70839. {
  70840. timeDomains.resize( timeDomainCount );
  70841. }
  70842. return createResultValue( result, timeDomains, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
  70843. }
  70844. template <typename TimeDomainEXTAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, TimeDomainEXT>::value, int>::type >
  70845. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<TimeDomainEXT, TimeDomainEXTAllocator>>::type PhysicalDevice::getCalibrateableTimeDomainsEXT( TimeDomainEXTAllocator & timeDomainEXTAllocator, Dispatch const & d ) const
  70846. {
  70847. std::vector<TimeDomainEXT, TimeDomainEXTAllocator> timeDomains( timeDomainEXTAllocator );
  70848. uint32_t timeDomainCount;
  70849. Result result;
  70850. do
  70851. {
  70852. result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, nullptr ) );
  70853. if ( ( result == Result::eSuccess ) && timeDomainCount )
  70854. {
  70855. timeDomains.resize( timeDomainCount );
  70856. result = static_cast<Result>( d.vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( m_physicalDevice, &timeDomainCount, reinterpret_cast<VkTimeDomainEXT *>( timeDomains.data() ) ) );
  70857. VULKAN_HPP_ASSERT( timeDomainCount <= timeDomains.size() );
  70858. }
  70859. } while ( result == Result::eIncomplete );
  70860. if ( ( result == Result::eSuccess ) && ( timeDomainCount < timeDomains.size() ) )
  70861. {
  70862. timeDomains.resize( timeDomainCount );
  70863. }
  70864. return createResultValue( result, timeDomains, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCalibrateableTimeDomainsEXT" );
  70865. }
  70866. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
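// Usage sketch (editorial illustration): checking whether GPU timestamps can be calibrated against
// a host clock (VK_EXT_calibrated_timestamps); std::find comes from <algorithm>.
//   std::vector<vk::TimeDomainEXT> domains = gpu.getCalibrateableTimeDomainsEXT();
//   bool hasDeviceDomain = std::find( domains.begin(), domains.end(), vk::TimeDomainEXT::eDevice ) != domains.end();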
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getCooperativeMatrixPropertiesNV( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV* pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, pPropertyCount, reinterpret_cast< VkCooperativeMatrixPropertiesNV *>( pProperties ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename CooperativeMatrixPropertiesNVAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type PhysicalDevice::getCooperativeMatrixPropertiesNV( Dispatch const & d ) const
  {
    std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties;
    uint32_t propertyCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
  }
  template <typename CooperativeMatrixPropertiesNVAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, CooperativeMatrixPropertiesNV>::value, int>::type >
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator>>::type PhysicalDevice::getCooperativeMatrixPropertiesNV( CooperativeMatrixPropertiesNVAllocator & cooperativeMatrixPropertiesNVAllocator, Dispatch const & d ) const
  {
    std::vector<CooperativeMatrixPropertiesNV, CooperativeMatrixPropertiesNVAllocator> properties( cooperativeMatrixPropertiesNVAllocator );
    uint32_t propertyCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( m_physicalDevice, &propertyCount, reinterpret_cast<VkCooperativeMatrixPropertiesNV *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getCooperativeMatrixPropertiesNV" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
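  // VK_EXT_directfb_surface: query whether the given queue family supports presentation to DirectFB.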
#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB* dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Bool32>( d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, dfb ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_INLINE Bool32 PhysicalDevice::getDirectFBPresentationSupportEXT( uint32_t queueFamilyIndex, IDirectFB & dfb, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return d.vkGetPhysicalDeviceDirectFBPresentationSupportEXT( m_physicalDevice, queueFamilyIndex, &dfb );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
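  // Display enumeration queries: the *2KHR variants come from VK_KHR_get_display_properties2, the non-2 variants
  // from VK_KHR_display. In enhanced mode all of them follow the two-call enumeration pattern and return a vector.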
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR* pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast< VkDisplayPlaneProperties2KHR *>( pProperties ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const
  {
    std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties;
    uint32_t propertyCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneProperties2KHR" );
  }
  template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayPlaneProperties2KHR>::value, int>::type >
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type PhysicalDevice::getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const
  {
    std::vector<DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties( displayPlaneProperties2KHRAllocator );
    uint32_t propertyCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlaneProperties2KHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR* pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast< VkDisplayPlanePropertiesKHR *>( pProperties ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const
  {
    std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties;
    uint32_t propertyCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlanePropertiesKHR" );
  }
  template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayPlanePropertiesKHR>::value, int>::type >
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const
  {
    std::vector<DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties( displayPlanePropertiesKHRAllocator );
    uint32_t propertyCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPlanePropertiesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR* pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast< VkDisplayProperties2KHR *>( pProperties ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DisplayProperties2KHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const
  {
    std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties;
    uint32_t propertyCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayProperties2KHR" );
  }
  template <typename DisplayProperties2KHRAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayProperties2KHR>::value, int>::type >
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d ) const
  {
    std::vector<DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator );
    uint32_t propertyCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayProperties2KHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR* pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast< VkDisplayPropertiesKHR *>( pProperties ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename DisplayPropertiesKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const
  {
    std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties;
    uint32_t propertyCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPropertiesKHR" );
  }
  template <typename DisplayPropertiesKHRAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, DisplayPropertiesKHR>::value, int>::type >
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d ) const
  {
    std::vector<DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator );
    uint32_t propertyCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && propertyCount )
      {
        properties.resize( propertyCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ) );
        VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( propertyCount < properties.size() ) )
    {
      properties.resize( propertyCount );
    }
    return createResultValue( result, properties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getDisplayPropertiesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
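  // External-handle capability queries: the unsuffixed functions are the Vulkan 1.1 core entry points, while the
  // KHR-suffixed overloads dispatch to the corresponding extension entry points. Enhanced mode returns the filled
  // properties structure by value.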
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), reinterpret_cast< VkExternalBufferProperties *>( pExternalBufferProperties ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferProperties( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
    return externalBufferProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties* pExternalBufferProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), reinterpret_cast< VkExternalBufferProperties *>( pExternalBufferProperties ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR( const PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
    d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
    return externalBufferProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), reinterpret_cast< VkExternalFenceProperties *>( pExternalFenceProperties ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFenceProperties( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
    return externalFenceProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties* pExternalFenceProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), reinterpret_cast< VkExternalFenceProperties *>( pExternalFenceProperties ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFencePropertiesKHR( const PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
    d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
    return externalFenceProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV* pExternalImageFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast< VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties;
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) ) );
    return createResultValue( result, externalImageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), reinterpret_cast< VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
    return externalSemaphoreProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties* pExternalSemaphoreProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), reinterpret_cast< VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( const PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
    d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
    return externalSemaphoreProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures* pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceFeatures *>( pFeatures ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) );
    return features;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
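  // getFeatures2 and the other *2-style queries additionally provide StructureChain overloads, so extension
  // feature/property structures can be chained onto the base structure and filled in a single call.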
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceFeatures2 *>( pFeatures ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
    return features;
  }
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2* pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceFeatures2 *>( pFeatures ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
    return features;
  }
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
    d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties* pFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast< VkFormatProperties *>( pFormatProperties ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) );
    return formatProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast< VkFormatProperties2 *>( pFormatProperties ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
    return formatProperties;
  }
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2* pFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast< VkFormatProperties2 *>( pFormatProperties ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
    d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
    return formatProperties;
  }
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
    d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
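  // VK_KHR_fragment_shading_rate: enumerate the fragment shading rates supported by this physical device.
  // Example (hypothetical usage, enhanced mode with exceptions enabled and the default dispatcher):
  //   std::vector<vk::PhysicalDeviceFragmentShadingRateKHR> rates = physicalDevice.getFragmentShadingRatesKHR();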
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getFragmentShadingRatesKHR( uint32_t* pFragmentShadingRateCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, pFragmentShadingRateCount, reinterpret_cast< VkPhysicalDeviceFragmentShadingRateKHR *>( pFragmentShadingRates ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type PhysicalDevice::getFragmentShadingRatesKHR( Dispatch const & d ) const
  {
    std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates;
    uint32_t fragmentShadingRateCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && fragmentShadingRateCount )
      {
        fragmentShadingRates.resize( fragmentShadingRateCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
        VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( fragmentShadingRateCount < fragmentShadingRates.size() ) )
    {
      fragmentShadingRates.resize( fragmentShadingRateCount );
    }
    return createResultValue( result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getFragmentShadingRatesKHR" );
  }
  template <typename PhysicalDeviceFragmentShadingRateKHRAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceFragmentShadingRateKHR>::value, int>::type >
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator>>::type PhysicalDevice::getFragmentShadingRatesKHR( PhysicalDeviceFragmentShadingRateKHRAllocator & physicalDeviceFragmentShadingRateKHRAllocator, Dispatch const & d ) const
  {
    std::vector<PhysicalDeviceFragmentShadingRateKHR, PhysicalDeviceFragmentShadingRateKHRAllocator> fragmentShadingRates( physicalDeviceFragmentShadingRateKHRAllocator );
    uint32_t fragmentShadingRateCount;
    Result result;
    do
    {
      result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, nullptr ) );
      if ( ( result == Result::eSuccess ) && fragmentShadingRateCount )
      {
        fragmentShadingRates.resize( fragmentShadingRateCount );
        result = static_cast<Result>( d.vkGetPhysicalDeviceFragmentShadingRatesKHR( m_physicalDevice, &fragmentShadingRateCount, reinterpret_cast<VkPhysicalDeviceFragmentShadingRateKHR *>( fragmentShadingRates.data() ) ) );
        VULKAN_HPP_ASSERT( fragmentShadingRateCount <= fragmentShadingRates.size() );
      }
    } while ( result == Result::eIncomplete );
    if ( ( result == Result::eSuccess ) && ( fragmentShadingRateCount < fragmentShadingRates.size() ) )
    {
      fragmentShadingRates.resize( fragmentShadingRateCount );
    }
    return createResultValue( result, fragmentShadingRates, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getFragmentShadingRatesKHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
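  // Image format capability queries: these return a Result because the requested combination of format, type,
  // tiling, usage and flags may be unsupported (e.g. Result::eErrorFormatNotSupported).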
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties* pImageFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast< VkImageFormatProperties *>( pImageFormatProperties ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) ) );
    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), reinterpret_cast< VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
  }
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2* pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2* pImageFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), reinterpret_cast< VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
  {
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
    return createResultValue( result, imageFormatProperties, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" );
  }
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2KHR( const PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
    Result result = static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ) );
    return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getImageFormatProperties2KHR" );
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties* pMemoryProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );
    return memoryProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
    return memoryProperties;
  }
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2* pMemoryProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
    return memoryProperties;
  }
  template <typename X, typename Y, typename... Z, typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    StructureChain<X, Y, Z...> structureChain;
    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
    d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
    return structureChain;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
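  // VK_EXT_sample_locations: query the multisample properties (maximum sample-location grid size) for the given
  // sample count.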
  template <typename Dispatch>
  VULKAN_HPP_INLINE void PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT* pMultisampleProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast< VkMultisamplePropertiesEXT *>( pMultisampleProperties ) );
  }
#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  template <typename Dispatch>
  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT PhysicalDevice::getMultisamplePropertiesEXT( VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  {
    VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT multisampleProperties;
    d.vkGetPhysicalDeviceMultisamplePropertiesEXT( m_physicalDevice, static_cast<VkSampleCountFlagBits>( samples ), reinterpret_cast<VkMultisamplePropertiesEXT *>( &multisampleProperties ) );
    return multisampleProperties;
  }
#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
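  // Enumerate the rectangles of the given surface that this physical device can present to; in enhanced mode the
  // result is returned as a std::vector<Rect2D> via the usual two-call pattern.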
  71531. template <typename Dispatch>
  71532. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pRectCount, VULKAN_HPP_NAMESPACE::Rect2D* pRects, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71533. {
  71534. return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast< VkRect2D *>( pRects ) ) );
  71535. }
  71536. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  71537. template <typename Rect2DAllocator, typename Dispatch>
  71538. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  71539. {
  71540. std::vector<Rect2D, Rect2DAllocator> rects;
  71541. uint32_t rectCount;
  71542. Result result;
  71543. do
  71544. {
  71545. result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
  71546. if ( ( result == Result::eSuccess ) && rectCount )
  71547. {
  71548. rects.resize( rectCount );
  71549. result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) ) );
  71550. VULKAN_HPP_ASSERT( rectCount <= rects.size() );
  71551. }
  71552. } while ( result == Result::eIncomplete );
  71553. if ( ( result == Result::eSuccess ) && ( rectCount < rects.size() ) )
  71554. {
  71555. rects.resize( rectCount );
  71556. }
  71557. return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" );
  71558. }
  71559. template <typename Rect2DAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, Rect2D>::value, int>::type >
  71560. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<Rect2D, Rect2DAllocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d ) const
  71561. {
  71562. std::vector<Rect2D, Rect2DAllocator> rects( rect2DAllocator );
  71563. uint32_t rectCount;
  71564. Result result;
  71565. do
  71566. {
  71567. result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ) );
  71568. if ( ( result == Result::eSuccess ) && rectCount )
  71569. {
  71570. rects.resize( rectCount );
  71571. result = static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) ) );
  71572. VULKAN_HPP_ASSERT( rectCount <= rects.size() );
  71573. }
  71574. } while ( result == Result::eIncomplete );
  71575. if ( ( result == Result::eSuccess ) && ( rectCount < rects.size() ) )
  71576. {
  71577. rects.resize( rectCount );
  71578. }
  71579. return createResultValue( result, rects, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getPresentRectanglesKHR" );
  71580. }
  71581. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
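// The vector-returning getPresentRectanglesKHR overloads above follow the usual two-call
// enumeration pattern: query the count, resize, query the data, and retry while the driver
// reports Result::eIncomplete; createResultValue then throws (or returns a ResultValue when
// exceptions are disabled). Illustrative usage, assuming exceptions enabled and a valid surface:
//   std::vector<vk::Rect2D> rects = physicalDevice.getPresentRectanglesKHR( surface );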
  71582. template <typename Dispatch>
  71583. VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties* pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71584. {
  71585. d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceProperties *>( pProperties ) );
  71586. }
  71587. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  71588. template <typename Dispatch>
  71589. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71590. {
  71591. VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
  71592. d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) );
  71593. return properties;
  71594. }
  71595. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
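// getProperties wraps vkGetPhysicalDeviceProperties, which cannot fail, so the enhanced
// overload above simply returns the structure by value. Illustrative usage:
//   vk::PhysicalDeviceProperties props = physicalDevice.getProperties();
//   uint32_t apiVersion = props.apiVersion;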
  71596. template <typename Dispatch>
  71597. VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71598. {
  71599. d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceProperties2 *>( pProperties ) );
  71600. }
  71601. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  71602. template <typename Dispatch>
  71603. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71604. {
  71605. VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
  71606. d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
  71607. return properties;
  71608. }
  71609. template <typename X, typename Y, typename... Z, typename Dispatch>
  71610. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71611. {
  71612. StructureChain<X, Y, Z...> structureChain;
  71613. VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
  71614. d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
  71615. return structureChain;
  71616. }
  71617. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
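// The StructureChain overload of getProperties2 above lets extension property structs be
// queried in one call: the chain's PhysicalDeviceProperties2 element is handed to the driver
// with its pNext already pointing at the other chain members. Illustrative usage, with
// PhysicalDeviceIDProperties as an example chained struct:
//   auto chain = physicalDevice.getProperties2<vk::PhysicalDeviceProperties2,
//                                              vk::PhysicalDeviceIDProperties>();
//   vk::PhysicalDeviceIDProperties idProps = chain.get<vk::PhysicalDeviceIDProperties>();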
  71618. template <typename Dispatch>
  71619. VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2* pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71620. {
  71621. d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast< VkPhysicalDeviceProperties2 *>( pProperties ) );
  71622. }
  71623. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  71624. template <typename Dispatch>
  71625. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71626. {
  71627. VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
  71628. d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
  71629. return properties;
  71630. }
  71631. template <typename X, typename Y, typename... Z, typename Dispatch>
  71632. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71633. {
  71634. StructureChain<X, Y, Z...> structureChain;
  71635. VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
  71636. d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
  71637. return structureChain;
  71638. }
  71639. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  71640. template <typename Dispatch>
  71641. VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71642. {
  71643. d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ), pNumPasses );
  71644. }
  71645. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  71646. template <typename Dispatch>
  71647. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71648. {
  71649. uint32_t numPasses;
  71650. d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), &numPasses );
  71651. return numPasses;
  71652. }
  71653. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
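// getQueueFamilyPerformanceQueryPassesKHR (VK_KHR_performance_query) reports how many
// submission passes are required to collect a given set of performance counters; the enhanced
// overload above returns that count directly. Illustrative usage, assuming a filled
// vk::QueryPoolPerformanceCreateInfoKHR perfCreateInfo:
//   uint32_t numPasses =
//     physicalDevice.getQueueFamilyPerformanceQueryPassesKHR( perfCreateInfo );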
  71654. template <typename Dispatch>
  71655. VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties* pQueueFamilyProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71656. {
  71657. d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast< VkQueueFamilyProperties *>( pQueueFamilyProperties ) );
  71658. }
  71659. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  71660. template <typename QueueFamilyPropertiesAllocator, typename Dispatch>
  71661. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const
  71662. {
  71663. std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties;
  71664. uint32_t queueFamilyPropertyCount;
  71665. d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  71666. queueFamilyProperties.resize( queueFamilyPropertyCount );
  71667. d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
  71668. VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  71669. return queueFamilyProperties;
  71670. }
  71671. template <typename QueueFamilyPropertiesAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties>::value, int>::type >
  71672. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d ) const
  71673. {
  71674. std::vector<QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties( queueFamilyPropertiesAllocator );
  71675. uint32_t queueFamilyPropertyCount;
  71676. d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  71677. queueFamilyProperties.resize( queueFamilyPropertyCount );
  71678. d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
  71679. VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  71680. return queueFamilyProperties;
  71681. }
  71682. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
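// vkGetPhysicalDeviceQueueFamilyProperties also returns void, so the vector-returning
// overloads above query the count once, resize, and read the data without a retry loop.
// Illustrative usage:
//   std::vector<vk::QueueFamilyProperties> queueFamilies =
//     physicalDevice.getQueueFamilyProperties();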
  71683. template <typename Dispatch>
  71684. VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71685. {
  71686. d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast< VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
  71687. }
  71688. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  71689. template <typename QueueFamilyProperties2Allocator, typename Dispatch>
  71690. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
  71691. {
  71692. std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
  71693. uint32_t queueFamilyPropertyCount;
  71694. d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  71695. queueFamilyProperties.resize( queueFamilyPropertyCount );
  71696. d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
  71697. VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  71698. return queueFamilyProperties;
  71699. }
  71700. template <typename QueueFamilyProperties2Allocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type >
  71701. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
  71702. {
  71703. std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
  71704. uint32_t queueFamilyPropertyCount;
  71705. d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  71706. queueFamilyProperties.resize( queueFamilyPropertyCount );
  71707. d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
  71708. VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  71709. return queueFamilyProperties;
  71710. }
  71711. template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
  71712. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
  71713. {
  71714. uint32_t queueFamilyPropertyCount;
  71715. d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  71716. std::vector<StructureChain, StructureChainAllocator> returnVector( queueFamilyPropertyCount );
  71717. std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
  71718. for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
  71719. {
  71720. queueFamilyProperties[i].pNext =
  71721. returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
  71722. }
  71723. d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
  71724. VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  71725. for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
  71726. {
  71727. returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
  71728. }
  71729. return returnVector;
  71730. }
  71731. template <typename StructureChain, typename StructureChainAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type>
  71732. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
  71733. {
  71734. uint32_t queueFamilyPropertyCount;
  71735. d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  71736. std::vector<StructureChain, StructureChainAllocator> returnVector( queueFamilyPropertyCount, structureChainAllocator );
  71737. std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
  71738. for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
  71739. {
  71740. queueFamilyProperties[i].pNext =
  71741. returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
  71742. }
  71743. d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
  71744. VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  71745. for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
  71746. {
  71747. returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
  71748. }
  71749. return returnVector;
  71750. }
  71751. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
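// In the StructureChain overloads of getQueueFamilyProperties2 above, each element of the
// returned vector owns its own chain: the first loop copies every chain's pNext pointer into
// a temporary QueueFamilyProperties2 array so the driver can fill the chained extension
// structs in place, and the second loop copies the base structs back into the chains.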
  71752. template <typename Dispatch>
  71753. VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t* pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2* pQueueFamilyProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71754. {
  71755. d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast< VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
  71756. }
  71757. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  71758. template <typename QueueFamilyProperties2Allocator, typename Dispatch>
  71759. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
  71760. {
  71761. std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
  71762. uint32_t queueFamilyPropertyCount;
  71763. d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  71764. queueFamilyProperties.resize( queueFamilyPropertyCount );
  71765. d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
  71766. VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  71767. return queueFamilyProperties;
  71768. }
  71769. template <typename QueueFamilyProperties2Allocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, QueueFamilyProperties2>::value, int>::type >
  71770. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
  71771. {
  71772. std::vector<QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
  71773. uint32_t queueFamilyPropertyCount;
  71774. d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  71775. queueFamilyProperties.resize( queueFamilyPropertyCount );
  71776. d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
  71777. VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  71778. return queueFamilyProperties;
  71779. }
  71780. template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
  71781. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const
  71782. {
  71783. uint32_t queueFamilyPropertyCount;
  71784. d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  71785. std::vector<StructureChain, StructureChainAllocator> returnVector( queueFamilyPropertyCount );
  71786. std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
  71787. for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
  71788. {
  71789. queueFamilyProperties[i].pNext =
  71790. returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
  71791. }
  71792. d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
  71793. VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  71794. for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
  71795. {
  71796. returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
  71797. }
  71798. return returnVector;
  71799. }
  71800. template <typename StructureChain, typename StructureChainAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, StructureChain>::value, int>::type>
  71801. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
  71802. {
  71803. uint32_t queueFamilyPropertyCount;
  71804. d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
  71805. std::vector<StructureChain, StructureChainAllocator> returnVector( queueFamilyPropertyCount, structureChainAllocator );
  71806. std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties( queueFamilyPropertyCount );
  71807. for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
  71808. {
  71809. queueFamilyProperties[i].pNext =
  71810. returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
  71811. }
  71812. d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
  71813. VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
  71814. for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
  71815. {
  71816. returnVector[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
  71817. }
  71818. return returnVector;
  71819. }
  71820. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  71821. template <typename Dispatch>
  71822. VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties* pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71823. {
  71824. d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), pPropertyCount, reinterpret_cast< VkSparseImageFormatProperties *>( pProperties ) );
  71825. }
  71826. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  71827. template <typename SparseImageFormatPropertiesAllocator, typename Dispatch>
  71828. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const & d ) const
  71829. {
  71830. std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties;
  71831. uint32_t propertyCount;
  71832. d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
  71833. properties.resize( propertyCount );
  71834. d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
  71835. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  71836. return properties;
  71837. }
  71838. template <typename SparseImageFormatPropertiesAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties>::value, int>::type >
  71839. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, Dispatch const & d ) const
  71840. {
  71841. std::vector<SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties( sparseImageFormatPropertiesAllocator );
  71842. uint32_t propertyCount;
  71843. d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
  71844. properties.resize( propertyCount );
  71845. d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
  71846. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  71847. return properties;
  71848. }
  71849. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
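// getSparseImageFormatProperties is another void C entry point, so the overloads above use
// the single-pass count/resize/fill pattern. Illustrative usage:
//   std::vector<vk::SparseImageFormatProperties> sparseProps =
//     physicalDevice.getSparseImageFormatProperties( vk::Format::eR8G8B8A8Unorm,
//                                                    vk::ImageType::e2D,
//                                                    vk::SampleCountFlagBits::e1,
//                                                    vk::ImageUsageFlagBits::eSampled,
//                                                    vk::ImageTiling::eOptimal );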
  71850. template <typename Dispatch>
  71851. VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71852. {
  71853. d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), pPropertyCount, reinterpret_cast< VkSparseImageFormatProperties2 *>( pProperties ) );
  71854. }
  71855. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  71856. template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
  71857. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const
  71858. {
  71859. std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
  71860. uint32_t propertyCount;
  71861. d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
  71862. properties.resize( propertyCount );
  71863. d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
  71864. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  71865. return properties;
  71866. }
  71867. template <typename SparseImageFormatProperties2Allocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type >
  71868. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> PhysicalDevice::getSparseImageFormatProperties2( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, Dispatch const & d ) const
  71869. {
  71870. std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
  71871. uint32_t propertyCount;
  71872. d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
  71873. properties.resize( propertyCount );
  71874. d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
  71875. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  71876. return properties;
  71877. }
  71878. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  71879. template <typename Dispatch>
  71880. VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2* pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71881. {
  71882. d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), pPropertyCount, reinterpret_cast< VkSparseImageFormatProperties2 *>( pProperties ) );
  71883. }
  71884. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  71885. template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
  71886. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const
  71887. {
  71888. std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
  71889. uint32_t propertyCount;
  71890. d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
  71891. properties.resize( propertyCount );
  71892. d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
  71893. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  71894. return properties;
  71895. }
  71896. template <typename SparseImageFormatProperties2Allocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SparseImageFormatProperties2>::value, int>::type >
  71897. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> PhysicalDevice::getSparseImageFormatProperties2KHR( const PhysicalDeviceSparseImageFormatInfo2 & formatInfo, SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, Dispatch const & d ) const
  71898. {
  71899. std::vector<SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
  71900. uint32_t propertyCount;
  71901. d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
  71902. properties.resize( propertyCount );
  71903. d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
  71904. VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
  71905. return properties;
  71906. }
  71907. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  71908. template <typename Dispatch>
  71909. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( uint32_t* pCombinationCount, VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV* pCombinations, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71910. {
  71911. return static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, pCombinationCount, reinterpret_cast< VkFramebufferMixedSamplesCombinationNV *>( pCombinations ) ) );
  71912. }
  71913. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  71914. template <typename FramebufferMixedSamplesCombinationNVAllocator, typename Dispatch>
  71915. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( Dispatch const & d ) const
  71916. {
  71917. std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations;
  71918. uint32_t combinationCount;
  71919. Result result;
  71920. do
  71921. {
  71922. result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) );
  71923. if ( ( result == Result::eSuccess ) && combinationCount )
  71924. {
  71925. combinations.resize( combinationCount );
  71926. result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) );
  71927. VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
  71928. }
  71929. } while ( result == Result::eIncomplete );
  71930. if ( ( result == Result::eSuccess ) && ( combinationCount < combinations.size() ) )
  71931. {
  71932. combinations.resize( combinationCount );
  71933. }
  71934. return createResultValue( result, combinations, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
  71935. }
  71936. template <typename FramebufferMixedSamplesCombinationNVAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, FramebufferMixedSamplesCombinationNV>::value, int>::type >
  71937. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator>>::type PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV( FramebufferMixedSamplesCombinationNVAllocator & framebufferMixedSamplesCombinationNVAllocator, Dispatch const & d ) const
  71938. {
  71939. std::vector<FramebufferMixedSamplesCombinationNV, FramebufferMixedSamplesCombinationNVAllocator> combinations( framebufferMixedSamplesCombinationNVAllocator );
  71940. uint32_t combinationCount;
  71941. Result result;
  71942. do
  71943. {
  71944. result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, nullptr ) );
  71945. if ( ( result == Result::eSuccess ) && combinationCount )
  71946. {
  71947. combinations.resize( combinationCount );
  71948. result = static_cast<Result>( d.vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( m_physicalDevice, &combinationCount, reinterpret_cast<VkFramebufferMixedSamplesCombinationNV *>( combinations.data() ) ) );
  71949. VULKAN_HPP_ASSERT( combinationCount <= combinations.size() );
  71950. }
  71951. } while ( result == Result::eIncomplete );
  71952. if ( ( result == Result::eSuccess ) && ( combinationCount < combinations.size() ) )
  71953. {
  71954. combinations.resize( combinationCount );
  71955. }
  71956. return createResultValue( result, combinations, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSupportedFramebufferMixedSamplesCombinationsNV" );
  71957. }
  71958. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  71959. template <typename Dispatch>
  71960. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT* pSurfaceCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71961. {
  71962. return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast< VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) );
  71963. }
  71964. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  71965. template <typename Dispatch>
  71966. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  71967. {
  71968. VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities;
  71969. Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) ) );
  71970. return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" );
  71971. }
  71972. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  71973. template <typename Dispatch>
  71974. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR* pSurfaceCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71975. {
  71976. return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast< VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) );
  71977. }
  71978. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  71979. template <typename Dispatch>
  71980. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  71981. {
  71982. VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities;
  71983. Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
  71984. return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" );
  71985. }
  71986. template <typename X, typename Y, typename... Z, typename Dispatch>
  71987. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getSurfaceCapabilities2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  71988. {
  71989. StructureChain<X, Y, Z...> structureChain;
  71990. VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>();
  71991. Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ) );
  71992. return createResultValue( result, structureChain, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceCapabilities2KHR" );
  71993. }
  71994. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
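// getSurfaceCapabilities2KHR takes a PhysicalDeviceSurfaceInfo2KHR instead of a bare surface
// handle, and the StructureChain overload above allows capability extension structs (for
// example vk::SurfaceProtectedCapabilitiesKHR) to be filled in the same call. Illustrative
// usage, assuming exceptions enabled and a valid surface:
//   vk::SurfaceCapabilities2KHR caps2 =
//     physicalDevice.getSurfaceCapabilities2KHR( vk::PhysicalDeviceSurfaceInfo2KHR( surface ) );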
  71995. template <typename Dispatch>
  71996. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR* pSurfaceCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  71997. {
  71998. return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast< VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) );
  71999. }
  72000. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72001. template <typename Dispatch>
  72002. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  72003. {
  72004. VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities;
  72005. Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) ) );
  72006. return createResultValue( result, surfaceCapabilities, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" );
  72007. }
  72008. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
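// Typical swapchain setup starts from the enhanced getSurfaceCapabilitiesKHR overload above.
// Illustrative usage (exceptions enabled):
//   vk::SurfaceCapabilitiesKHR caps = physicalDevice.getSurfaceCapabilitiesKHR( surface );
//   uint32_t minImages = caps.minImageCount;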
  72009. template <typename Dispatch>
  72010. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR* pSurfaceFormats, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72011. {
  72012. return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), pSurfaceFormatCount, reinterpret_cast< VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) );
  72013. }
  72014. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72015. template <typename SurfaceFormat2KHRAllocator, typename Dispatch>
  72016. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  72017. {
  72018. std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats;
  72019. uint32_t surfaceFormatCount;
  72020. Result result;
  72021. do
  72022. {
  72023. result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
  72024. if ( ( result == Result::eSuccess ) && surfaceFormatCount )
  72025. {
  72026. surfaceFormats.resize( surfaceFormatCount );
  72027. result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
  72028. VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
  72029. }
  72030. } while ( result == Result::eIncomplete );
  72031. if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) )
  72032. {
  72033. surfaceFormats.resize( surfaceFormatCount );
  72034. }
  72035. return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" );
  72036. }
  72037. template <typename SurfaceFormat2KHRAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormat2KHR>::value, int>::type >
  72038. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, Dispatch const & d ) const
  72039. {
  72040. std::vector<SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator );
  72041. uint32_t surfaceFormatCount;
  72042. Result result;
  72043. do
  72044. {
  72045. result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ) );
  72046. if ( ( result == Result::eSuccess ) && surfaceFormatCount )
  72047. {
  72048. surfaceFormats.resize( surfaceFormatCount );
  72049. result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ) );
  72050. VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
  72051. }
  72052. } while ( result == Result::eIncomplete );
  72053. if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) )
  72054. {
  72055. surfaceFormats.resize( surfaceFormatCount );
  72056. }
  72057. return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormats2KHR" );
  72058. }
  72059. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  72060. template <typename Dispatch>
  72061. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR* pSurfaceFormats, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72062. {
  72063. return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast< VkSurfaceFormatKHR *>( pSurfaceFormats ) ) );
  72064. }
  72065. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72066. template <typename SurfaceFormatKHRAllocator, typename Dispatch>
  72067. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  72068. {
  72069. std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats;
  72070. uint32_t surfaceFormatCount;
  72071. Result result;
  72072. do
  72073. {
  72074. result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
  72075. if ( ( result == Result::eSuccess ) && surfaceFormatCount )
  72076. {
  72077. surfaceFormats.resize( surfaceFormatCount );
  72078. result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) );
  72079. VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
  72080. }
  72081. } while ( result == Result::eIncomplete );
  72082. if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) )
  72083. {
  72084. surfaceFormats.resize( surfaceFormatCount );
  72085. }
  72086. return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" );
  72087. }
  72088. template <typename SurfaceFormatKHRAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, SurfaceFormatKHR>::value, int>::type >
  72089. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, Dispatch const & d ) const
  72090. {
  72091. std::vector<SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator );
  72092. uint32_t surfaceFormatCount;
  72093. Result result;
  72094. do
  72095. {
  72096. result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ) );
  72097. if ( ( result == Result::eSuccess ) && surfaceFormatCount )
  72098. {
  72099. surfaceFormats.resize( surfaceFormatCount );
  72100. result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ) );
  72101. VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() );
  72102. }
  72103. } while ( result == Result::eIncomplete );
  72104. if ( ( result == Result::eSuccess ) && ( surfaceFormatCount < surfaceFormats.size() ) )
  72105. {
  72106. surfaceFormats.resize( surfaceFormatCount );
  72107. }
  72108. return createResultValue( result, surfaceFormats, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfaceFormatsKHR" );
  72109. }
  72110. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
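// Illustrative usage of the enumerating getSurfaceFormatsKHR overload above (exceptions
// enabled, valid surface); each returned entry pairs a vk::Format with a vk::ColorSpaceKHR:
//   std::vector<vk::SurfaceFormatKHR> formats = physicalDevice.getSurfaceFormatsKHR( surface );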
  72111. #ifdef VK_USE_PLATFORM_WIN32_KHR
  72112. template <typename Dispatch>
  72113. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModes2EXT( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72114. {
  72115. return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), pPresentModeCount, reinterpret_cast< VkPresentModeKHR *>( pPresentModes ) ) );
  72116. }
  72117. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72118. template <typename PresentModeKHRAllocator, typename Dispatch>
  72119. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const
  72120. {
  72121. std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes;
  72122. uint32_t presentModeCount;
  72123. Result result;
  72124. do
  72125. {
  72126. result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr ) );
  72127. if ( ( result == Result::eSuccess ) && presentModeCount )
  72128. {
  72129. presentModes.resize( presentModeCount );
  72130. result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
  72131. VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
  72132. }
  72133. } while ( result == Result::eIncomplete );
  72134. if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
  72135. {
  72136. presentModes.resize( presentModeCount );
  72137. }
  72138. return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModes2EXT" );
  72139. }
  72140. template <typename PresentModeKHRAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type >
  72141. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type PhysicalDevice::getSurfacePresentModes2EXT( const PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, PresentModeKHRAllocator & presentModeKHRAllocator, Dispatch const & d ) const
  72142. {
  72143. std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
  72144. uint32_t presentModeCount;
  72145. Result result;
  72146. do
  72147. {
  72148. result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, nullptr ) );
  72149. if ( ( result == Result::eSuccess ) && presentModeCount )
  72150. {
  72151. presentModes.resize( presentModeCount );
  72152. result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModes2EXT( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
  72153. VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
  72154. }
  72155. } while ( result == Result::eIncomplete );
  72156. if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
  72157. {
  72158. presentModes.resize( presentModeCount );
  72159. }
  72160. return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModes2EXT" );
  72161. }
  72162. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  72163. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  72164. template <typename Dispatch>
  72165. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t* pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR* pPresentModes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72166. {
  72167. return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast< VkPresentModeKHR *>( pPresentModes ) ) );
  72168. }
  72169. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72170. template <typename PresentModeKHRAllocator, typename Dispatch>
  72171. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  72172. {
  72173. std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes;
  72174. uint32_t presentModeCount;
  72175. Result result;
  72176. do
  72177. {
  72178. result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
  72179. if ( ( result == Result::eSuccess ) && presentModeCount )
  72180. {
  72181. presentModes.resize( presentModeCount );
  72182. result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
  72183. VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
  72184. }
  72185. } while ( result == Result::eIncomplete );
  72186. if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
  72187. {
  72188. presentModes.resize( presentModeCount );
  72189. }
  72190. return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" );
  72191. }
  72192. template <typename PresentModeKHRAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PresentModeKHR>::value, int>::type >
  72193. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PresentModeKHR, PresentModeKHRAllocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, PresentModeKHRAllocator & presentModeKHRAllocator, Dispatch const & d ) const
  72194. {
  72195. std::vector<PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator );
  72196. uint32_t presentModeCount;
  72197. Result result;
  72198. do
  72199. {
  72200. result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ) );
  72201. if ( ( result == Result::eSuccess ) && presentModeCount )
  72202. {
  72203. presentModes.resize( presentModeCount );
  72204. result = static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ) );
  72205. VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() );
  72206. }
  72207. } while ( result == Result::eIncomplete );
  72208. if ( ( result == Result::eSuccess ) && ( presentModeCount < presentModes.size() ) )
  72209. {
  72210. presentModes.resize( presentModeCount );
  72211. }
  72212. return createResultValue( result, presentModes, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getSurfacePresentModesKHR" );
  72213. }
  72214. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
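// Illustrative usage of getSurfacePresentModesKHR above; FIFO is the only present mode the
// specification guarantees, so applications usually search the returned vector for a
// preferred mode and fall back to vk::PresentModeKHR::eFifo:
//   std::vector<vk::PresentModeKHR> modes = physicalDevice.getSurfacePresentModesKHR( surface );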
  72215. template <typename Dispatch>
  72216. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32* pSupported, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72217. {
  72218. return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast< VkBool32 *>( pSupported ) ) );
  72219. }
  72220. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72221. template <typename Dispatch>
  72222. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const
  72223. {
  72224. VULKAN_HPP_NAMESPACE::Bool32 supported;
  72225. Result result = static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( &supported ) ) );
  72226. return createResultValue( result, supported, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" );
  72227. }
  72228. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
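// getSurfaceSupportKHR reports whether a queue family can present to the given surface; the
// enhanced overload above returns the Bool32 directly, throwing on error when exceptions are
// enabled. Illustrative usage:
//   vk::Bool32 presentSupport = physicalDevice.getSurfaceSupportKHR( queueFamilyIndex, surface );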
  72229. template <typename Dispatch>
  72230. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolPropertiesEXT( uint32_t* pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT* pToolProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72231. {
  72232. return static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, pToolCount, reinterpret_cast< VkPhysicalDeviceToolPropertiesEXT *>( pToolProperties ) ) );
  72233. }
  72234. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72235. template <typename PhysicalDeviceToolPropertiesEXTAllocator, typename Dispatch>
  72236. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator>>::type PhysicalDevice::getToolPropertiesEXT( Dispatch const & d ) const
  72237. {
  72238. std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator> toolProperties;
  72239. uint32_t toolCount;
  72240. Result result;
  72241. do
  72242. {
  72243. result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
  72244. if ( ( result == Result::eSuccess ) && toolCount )
  72245. {
  72246. toolProperties.resize( toolCount );
  72247. result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT *>( toolProperties.data() ) ) );
  72248. VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
  72249. }
  72250. } while ( result == Result::eIncomplete );
  72251. if ( ( result == Result::eSuccess ) && ( toolCount < toolProperties.size() ) )
  72252. {
  72253. toolProperties.resize( toolCount );
  72254. }
  72255. return createResultValue( result, toolProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getToolPropertiesEXT" );
  72256. }
  72257. template <typename PhysicalDeviceToolPropertiesEXTAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, PhysicalDeviceToolPropertiesEXT>::value, int>::type >
  72258. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator>>::type PhysicalDevice::getToolPropertiesEXT( PhysicalDeviceToolPropertiesEXTAllocator & physicalDeviceToolPropertiesEXTAllocator, Dispatch const & d ) const
  72259. {
  72260. std::vector<PhysicalDeviceToolPropertiesEXT, PhysicalDeviceToolPropertiesEXTAllocator> toolProperties( physicalDeviceToolPropertiesEXTAllocator );
  72261. uint32_t toolCount;
  72262. Result result;
  72263. do
  72264. {
  72265. result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, nullptr ) );
  72266. if ( ( result == Result::eSuccess ) && toolCount )
  72267. {
  72268. toolProperties.resize( toolCount );
  72269. result = static_cast<Result>( d.vkGetPhysicalDeviceToolPropertiesEXT( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolPropertiesEXT *>( toolProperties.data() ) ) );
  72270. VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
  72271. }
  72272. } while ( result == Result::eIncomplete );
  72273. if ( ( result == Result::eSuccess ) && ( toolCount < toolProperties.size() ) )
  72274. {
  72275. toolProperties.resize( toolCount );
  72276. }
  72277. return createResultValue( result, toolProperties, VULKAN_HPP_NAMESPACE_STRING"::PhysicalDevice::getToolPropertiesEXT" );
  72278. }
  72279. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  72280. #ifdef VK_USE_PLATFORM_WAYLAND_KHR
  72281. template <typename Dispatch>
  72282. VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display* display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72283. {
  72284. return static_cast<Bool32>( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) );
  72285. }
  72286. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72287. template <typename Dispatch>
  72288. VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72289. {
  72290. return d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display );
  72291. }
  72292. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  72293. #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
  72294. #ifdef VK_USE_PLATFORM_WIN32_KHR
  72295. template <typename Dispatch>
  72296. VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72297. {
  72298. return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) );
  72299. }
  72300. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  72301. #ifdef VK_USE_PLATFORM_XCB_KHR
  72302. template <typename Dispatch>
  72303. VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72304. {
  72305. return static_cast<Bool32>( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) );
  72306. }
  72307. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72308. template <typename Dispatch>
  72309. VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72310. {
  72311. return d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id );
  72312. }
  72313. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  72314. #endif /*VK_USE_PLATFORM_XCB_KHR*/
  72315. #ifdef VK_USE_PLATFORM_XLIB_KHR
  72316. template <typename Dispatch>
  72317. VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display* dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72318. {
  72319. return static_cast<Bool32>( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) );
  72320. }
  72321. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72322. template <typename Dispatch>
  72323. VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72324. {
  72325. return d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID );
  72326. }
  72327. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  72328. #endif /*VK_USE_PLATFORM_XLIB_KHR*/
  72329. #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
  72330. template <typename Dispatch>
  72331. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display* dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72332. {
  72333. return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast< VkDisplayKHR *>( pDisplay ) ) );
  72334. }
  72335. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72336. template <typename Dispatch>
  72337. VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
  72338. {
  72339. VULKAN_HPP_NAMESPACE::DisplayKHR display;
  72340. Result result = static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
  72341. return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" );
  72342. }
  72343. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  72344. template <typename Dispatch>
  72345. VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const
  72346. {
  72347. VULKAN_HPP_NAMESPACE::DisplayKHR display;
  72348. Result result = static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
  72349. ObjectRelease<PhysicalDevice, Dispatch> deleter( *this, d );
  72350. return createResultValue<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique", deleter );
  72351. }
  72352. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  72353. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  72354. #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
  72355. #ifdef VK_USE_PLATFORM_WIN32_KHR
  72356. template <typename Dispatch>
  72357. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, VULKAN_HPP_NAMESPACE::DisplayKHR* pDisplay, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72358. {
  72359. return static_cast<Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast< VkDisplayKHR *>( pDisplay ) ) );
  72360. }
  72361. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72362. template <typename Dispatch>
  72363. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type PhysicalDevice::getWinrtDisplayNV( uint32_t deviceRelativeId, Dispatch const & d ) const
  72364. {
  72365. VULKAN_HPP_NAMESPACE::DisplayKHR display;
  72366. Result result = static_cast<Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
  72367. return createResultValue( result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNV" );
  72368. }
  72369. # ifndef VULKAN_HPP_NO_SMART_HANDLE
  72370. template <typename Dispatch>
  72371. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type PhysicalDevice::getWinrtDisplayNVUnique( uint32_t deviceRelativeId, Dispatch const & d ) const
  72372. {
  72373. VULKAN_HPP_NAMESPACE::DisplayKHR display;
  72374. Result result = static_cast<Result>( d.vkGetWinrtDisplayNV( m_physicalDevice, deviceRelativeId, reinterpret_cast<VkDisplayKHR *>( &display ) ) );
  72375. ObjectRelease<PhysicalDevice, Dispatch> deleter( *this, d );
  72376. return createResultValue<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( result, display, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getWinrtDisplayNVUnique", deleter );
  72377. }
  72378. # endif /*VULKAN_HPP_NO_SMART_HANDLE*/
  72379. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  72380. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  72381. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72382. template <typename Dispatch>
  72383. VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72384. {
  72385. return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
  72386. }
  72387. #else
  72388. template <typename Dispatch>
  72389. VULKAN_HPP_INLINE typename ResultValueType<void>::type PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const
  72390. {
  72391. Result result = static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) );
  72392. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::releaseDisplayEXT" );
  72393. }
  72394. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  72395. template <typename Dispatch>
  72396. VULKAN_HPP_INLINE void Queue::getCheckpointDataNV( uint32_t* pCheckpointDataCount, VULKAN_HPP_NAMESPACE::CheckpointDataNV* pCheckpointData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72397. {
  72398. d.vkGetQueueCheckpointDataNV( m_queue, pCheckpointDataCount, reinterpret_cast< VkCheckpointDataNV *>( pCheckpointData ) );
  72399. }
  72400. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72401. template <typename CheckpointDataNVAllocator, typename Dispatch>
  72402. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator> Queue::getCheckpointDataNV( Dispatch const & d ) const
  72403. {
  72404. std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData;
  72405. uint32_t checkpointDataCount;
  72406. d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
  72407. checkpointData.resize( checkpointDataCount );
  72408. d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
  72409. VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
  72410. return checkpointData;
  72411. }
  72412. template <typename CheckpointDataNVAllocator, typename Dispatch, typename B, typename std::enable_if<std::is_same<typename B::value_type, CheckpointDataNV>::value, int>::type >
  72413. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<CheckpointDataNV, CheckpointDataNVAllocator> Queue::getCheckpointDataNV( CheckpointDataNVAllocator & checkpointDataNVAllocator, Dispatch const & d ) const
  72414. {
  72415. std::vector<CheckpointDataNV, CheckpointDataNVAllocator> checkpointData( checkpointDataNVAllocator );
  72416. uint32_t checkpointDataCount;
  72417. d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, nullptr );
  72418. checkpointData.resize( checkpointDataCount );
  72419. d.vkGetQueueCheckpointDataNV( m_queue, &checkpointDataCount, reinterpret_cast<VkCheckpointDataNV *>( checkpointData.data() ) );
  72420. VULKAN_HPP_ASSERT( checkpointDataCount <= checkpointData.size() );
  72421. return checkpointData;
  72422. }
  72423. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  72424. template <typename Dispatch>
  72425. VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72426. {
  72427. d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  72428. }
  72429. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72430. template <typename Dispatch>
  72431. VULKAN_HPP_INLINE void Queue::beginDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72432. {
  72433. d.vkQueueBeginDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  72434. }
  72435. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  72436. template <typename Dispatch>
  72437. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo* pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72438. {
  72439. return static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) );
  72440. }
  72441. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72442. template <typename Dispatch>
  72443. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::bindSparse( ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
  72444. {
  72445. Result result = static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), static_cast<VkFence>( fence ) ) );
  72446. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
  72447. }
  72448. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  72449. template <typename Dispatch>
  72450. VULKAN_HPP_INLINE void Queue::endDebugUtilsLabelEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72451. {
  72452. d.vkQueueEndDebugUtilsLabelEXT( m_queue );
  72453. }
  72454. template <typename Dispatch>
  72455. VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT* pLabelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72456. {
  72457. d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( pLabelInfo ) );
  72458. }
  72459. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72460. template <typename Dispatch>
  72461. VULKAN_HPP_INLINE void Queue::insertDebugUtilsLabelEXT( const DebugUtilsLabelEXT & labelInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72462. {
  72463. d.vkQueueInsertDebugUtilsLabelEXT( m_queue, reinterpret_cast<const VkDebugUtilsLabelEXT *>( &labelInfo ) );
  72464. }
  72465. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  72466. template <typename Dispatch>
  72467. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR* pPresentInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72468. {
  72469. return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) );
  72470. }
  72471. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72472. template <typename Dispatch>
  72473. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const PresentInfoKHR & presentInfo, Dispatch const & d ) const
  72474. {
  72475. Result result = static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) ) );
  72476. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } );
  72477. }
  72478. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  72479. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72480. template <typename Dispatch>
  72481. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72482. {
  72483. return static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  72484. }
  72485. #else
  72486. template <typename Dispatch>
  72487. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::setPerformanceConfigurationINTEL( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL configuration, Dispatch const & d ) const
  72488. {
  72489. Result result = static_cast<Result>( d.vkQueueSetPerformanceConfigurationINTEL( m_queue, static_cast<VkPerformanceConfigurationINTEL>( configuration ) ) );
  72490. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::setPerformanceConfigurationINTEL" );
  72491. }
  72492. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  72493. template <typename Dispatch>
  72494. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo* pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72495. {
  72496. return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
  72497. }
  72498. #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72499. template <typename Dispatch>
  72500. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit( ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
  72501. {
  72502. Result result = static_cast<Result>( d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) ) );
  72503. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
  72504. }
  72505. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  72506. #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
  72507. template <typename Dispatch>
  72508. VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
  72509. {
  72510. return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
  72511. }
  72512. #else
  72513. template <typename Dispatch>
  72514. VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle( Dispatch const & d ) const
  72515. {
  72516. Result result = static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
  72517. return createResultValue( result, VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );
  72518. }
  72519. #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
  72520. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  72521. template <> struct StructExtends<AndroidHardwareBufferFormatPropertiesANDROID, AndroidHardwareBufferPropertiesANDROID>{ enum { value = true }; };
  72522. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  72523. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  72524. template <> struct StructExtends<AndroidHardwareBufferUsageANDROID, ImageFormatProperties2>{ enum { value = true }; };
  72525. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  72526. template <> struct StructExtends<AttachmentDescriptionStencilLayout, AttachmentDescription2>{ enum { value = true }; };
  72527. template <> struct StructExtends<AttachmentReferenceStencilLayout, AttachmentReference2>{ enum { value = true }; };
  72528. template <> struct StructExtends<BindBufferMemoryDeviceGroupInfo, BindBufferMemoryInfo>{ enum { value = true }; };
  72529. template <> struct StructExtends<BindImageMemoryDeviceGroupInfo, BindImageMemoryInfo>{ enum { value = true }; };
  72530. template <> struct StructExtends<BindImageMemorySwapchainInfoKHR, BindImageMemoryInfo>{ enum { value = true }; };
  72531. template <> struct StructExtends<BindImagePlaneMemoryInfo, BindImageMemoryInfo>{ enum { value = true }; };
  72532. template <> struct StructExtends<BufferDeviceAddressCreateInfoEXT, BufferCreateInfo>{ enum { value = true }; };
  72533. template <> struct StructExtends<BufferOpaqueCaptureAddressCreateInfo, BufferCreateInfo>{ enum { value = true }; };
  72534. template <> struct StructExtends<CommandBufferInheritanceConditionalRenderingInfoEXT, CommandBufferInheritanceInfo>{ enum { value = true }; };
  72535. template <> struct StructExtends<CommandBufferInheritanceRenderPassTransformInfoQCOM, CommandBufferInheritanceInfo>{ enum { value = true }; };
  72536. template <> struct StructExtends<CopyCommandTransformInfoQCOM, BufferImageCopy2KHR>{ enum { value = true }; };
  72537. template <> struct StructExtends<CopyCommandTransformInfoQCOM, ImageBlit2KHR>{ enum { value = true }; };
  72538. #ifdef VK_USE_PLATFORM_WIN32_KHR
  72539. template <> struct StructExtends<D3D12FenceSubmitInfoKHR, SubmitInfo>{ enum { value = true }; };
  72540. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  72541. template <> struct StructExtends<DebugReportCallbackCreateInfoEXT, InstanceCreateInfo>{ enum { value = true }; };
  72542. template <> struct StructExtends<DebugUtilsMessengerCreateInfoEXT, InstanceCreateInfo>{ enum { value = true }; };
  72543. template <> struct StructExtends<DedicatedAllocationBufferCreateInfoNV, BufferCreateInfo>{ enum { value = true }; };
  72544. template <> struct StructExtends<DedicatedAllocationImageCreateInfoNV, ImageCreateInfo>{ enum { value = true }; };
  72545. template <> struct StructExtends<DedicatedAllocationMemoryAllocateInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
  72546. template <> struct StructExtends<DescriptorPoolInlineUniformBlockCreateInfoEXT, DescriptorPoolCreateInfo>{ enum { value = true }; };
  72547. template <> struct StructExtends<DescriptorSetLayoutBindingFlagsCreateInfo, DescriptorSetLayoutCreateInfo>{ enum { value = true }; };
  72548. template <> struct StructExtends<DescriptorSetVariableDescriptorCountAllocateInfo, DescriptorSetAllocateInfo>{ enum { value = true }; };
  72549. template <> struct StructExtends<DescriptorSetVariableDescriptorCountLayoutSupport, DescriptorSetLayoutSupport>{ enum { value = true }; };
  72550. template <> struct StructExtends<DeviceDeviceMemoryReportCreateInfoEXT, DeviceCreateInfo>{ enum { value = true }; };
  72551. template <> struct StructExtends<DeviceDiagnosticsConfigCreateInfoNV, DeviceCreateInfo>{ enum { value = true }; };
  72552. template <> struct StructExtends<DeviceGroupBindSparseInfo, BindSparseInfo>{ enum { value = true }; };
  72553. template <> struct StructExtends<DeviceGroupCommandBufferBeginInfo, CommandBufferBeginInfo>{ enum { value = true }; };
  72554. template <> struct StructExtends<DeviceGroupDeviceCreateInfo, DeviceCreateInfo>{ enum { value = true }; };
  72555. template <> struct StructExtends<DeviceGroupPresentInfoKHR, PresentInfoKHR>{ enum { value = true }; };
  72556. template <> struct StructExtends<DeviceGroupRenderPassBeginInfo, RenderPassBeginInfo>{ enum { value = true }; };
  72557. template <> struct StructExtends<DeviceGroupSubmitInfo, SubmitInfo>{ enum { value = true }; };
  72558. template <> struct StructExtends<DeviceGroupSwapchainCreateInfoKHR, SwapchainCreateInfoKHR>{ enum { value = true }; };
  72559. template <> struct StructExtends<DeviceMemoryOverallocationCreateInfoAMD, DeviceCreateInfo>{ enum { value = true }; };
  72560. template <> struct StructExtends<DevicePrivateDataCreateInfoEXT, DeviceCreateInfo>{ enum { value = true }; };
  72561. template <> struct StructExtends<DeviceQueueGlobalPriorityCreateInfoEXT, DeviceQueueCreateInfo>{ enum { value = true }; };
  72562. template <> struct StructExtends<DisplayNativeHdrSurfaceCapabilitiesAMD, SurfaceCapabilities2KHR>{ enum { value = true }; };
  72563. template <> struct StructExtends<DisplayPresentInfoKHR, PresentInfoKHR>{ enum { value = true }; };
  72564. template <> struct StructExtends<DrmFormatModifierPropertiesListEXT, FormatProperties2>{ enum { value = true }; };
  72565. template <> struct StructExtends<ExportFenceCreateInfo, FenceCreateInfo>{ enum { value = true }; };
  72566. #ifdef VK_USE_PLATFORM_WIN32_KHR
  72567. template <> struct StructExtends<ExportFenceWin32HandleInfoKHR, FenceCreateInfo>{ enum { value = true }; };
  72568. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  72569. template <> struct StructExtends<ExportMemoryAllocateInfo, MemoryAllocateInfo>{ enum { value = true }; };
  72570. template <> struct StructExtends<ExportMemoryAllocateInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
  72571. #ifdef VK_USE_PLATFORM_WIN32_KHR
  72572. template <> struct StructExtends<ExportMemoryWin32HandleInfoKHR, MemoryAllocateInfo>{ enum { value = true }; };
  72573. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  72574. #ifdef VK_USE_PLATFORM_WIN32_KHR
  72575. template <> struct StructExtends<ExportMemoryWin32HandleInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
  72576. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  72577. template <> struct StructExtends<ExportSemaphoreCreateInfo, SemaphoreCreateInfo>{ enum { value = true }; };
  72578. #ifdef VK_USE_PLATFORM_WIN32_KHR
  72579. template <> struct StructExtends<ExportSemaphoreWin32HandleInfoKHR, SemaphoreCreateInfo>{ enum { value = true }; };
  72580. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  72581. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  72582. template <> struct StructExtends<ExternalFormatANDROID, ImageCreateInfo>{ enum { value = true }; };
  72583. template <> struct StructExtends<ExternalFormatANDROID, SamplerYcbcrConversionCreateInfo>{ enum { value = true }; };
  72584. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  72585. template <> struct StructExtends<ExternalImageFormatProperties, ImageFormatProperties2>{ enum { value = true }; };
  72586. template <> struct StructExtends<ExternalMemoryBufferCreateInfo, BufferCreateInfo>{ enum { value = true }; };
  72587. template <> struct StructExtends<ExternalMemoryImageCreateInfo, ImageCreateInfo>{ enum { value = true }; };
  72588. template <> struct StructExtends<ExternalMemoryImageCreateInfoNV, ImageCreateInfo>{ enum { value = true }; };
  72589. template <> struct StructExtends<FilterCubicImageViewImageFormatPropertiesEXT, ImageFormatProperties2>{ enum { value = true }; };
  72590. template <> struct StructExtends<FragmentShadingRateAttachmentInfoKHR, SubpassDescription2>{ enum { value = true }; };
  72591. template <> struct StructExtends<FramebufferAttachmentsCreateInfo, FramebufferCreateInfo>{ enum { value = true }; };
  72592. template <> struct StructExtends<GraphicsPipelineShaderGroupsCreateInfoNV, GraphicsPipelineCreateInfo>{ enum { value = true }; };
  72593. template <> struct StructExtends<ImageDrmFormatModifierExplicitCreateInfoEXT, ImageCreateInfo>{ enum { value = true }; };
  72594. template <> struct StructExtends<ImageDrmFormatModifierListCreateInfoEXT, ImageCreateInfo>{ enum { value = true }; };
  72595. template <> struct StructExtends<ImageFormatListCreateInfo, ImageCreateInfo>{ enum { value = true }; };
  72596. template <> struct StructExtends<ImageFormatListCreateInfo, SwapchainCreateInfoKHR>{ enum { value = true }; };
  72597. template <> struct StructExtends<ImageFormatListCreateInfo, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
  72598. template <> struct StructExtends<ImagePlaneMemoryRequirementsInfo, ImageMemoryRequirementsInfo2>{ enum { value = true }; };
  72599. template <> struct StructExtends<ImageStencilUsageCreateInfo, ImageCreateInfo>{ enum { value = true }; };
  72600. template <> struct StructExtends<ImageStencilUsageCreateInfo, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
  72601. template <> struct StructExtends<ImageSwapchainCreateInfoKHR, ImageCreateInfo>{ enum { value = true }; };
  72602. template <> struct StructExtends<ImageViewASTCDecodeModeEXT, ImageViewCreateInfo>{ enum { value = true }; };
  72603. template <> struct StructExtends<ImageViewUsageCreateInfo, ImageViewCreateInfo>{ enum { value = true }; };
  72604. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  72605. template <> struct StructExtends<ImportAndroidHardwareBufferInfoANDROID, MemoryAllocateInfo>{ enum { value = true }; };
  72606. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  72607. template <> struct StructExtends<ImportMemoryFdInfoKHR, MemoryAllocateInfo>{ enum { value = true }; };
  72608. template <> struct StructExtends<ImportMemoryHostPointerInfoEXT, MemoryAllocateInfo>{ enum { value = true }; };
  72609. #ifdef VK_USE_PLATFORM_WIN32_KHR
  72610. template <> struct StructExtends<ImportMemoryWin32HandleInfoKHR, MemoryAllocateInfo>{ enum { value = true }; };
  72611. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  72612. #ifdef VK_USE_PLATFORM_WIN32_KHR
  72613. template <> struct StructExtends<ImportMemoryWin32HandleInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
  72614. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  72615. template <> struct StructExtends<MemoryAllocateFlagsInfo, MemoryAllocateInfo>{ enum { value = true }; };
  72616. template <> struct StructExtends<MemoryDedicatedAllocateInfo, MemoryAllocateInfo>{ enum { value = true }; };
  72617. template <> struct StructExtends<MemoryDedicatedRequirements, MemoryRequirements2>{ enum { value = true }; };
  72618. template <> struct StructExtends<MemoryOpaqueCaptureAddressAllocateInfo, MemoryAllocateInfo>{ enum { value = true }; };
  72619. template <> struct StructExtends<MemoryPriorityAllocateInfoEXT, MemoryAllocateInfo>{ enum { value = true }; };
  72620. template <> struct StructExtends<MutableDescriptorTypeCreateInfoVALVE, DescriptorSetLayoutCreateInfo>{ enum { value = true }; };
  72621. template <> struct StructExtends<MutableDescriptorTypeCreateInfoVALVE, DescriptorPoolCreateInfo>{ enum { value = true }; };
  72622. template <> struct StructExtends<PerformanceQuerySubmitInfoKHR, SubmitInfo>{ enum { value = true }; };
  72623. template <> struct StructExtends<PhysicalDevice16BitStorageFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72624. template <> struct StructExtends<PhysicalDevice16BitStorageFeatures, DeviceCreateInfo>{ enum { value = true }; };
  72625. template <> struct StructExtends<PhysicalDevice4444FormatsFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72626. template <> struct StructExtends<PhysicalDevice4444FormatsFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72627. template <> struct StructExtends<PhysicalDevice8BitStorageFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72628. template <> struct StructExtends<PhysicalDevice8BitStorageFeatures, DeviceCreateInfo>{ enum { value = true }; };
  72629. template <> struct StructExtends<PhysicalDeviceASTCDecodeFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72630. template <> struct StructExtends<PhysicalDeviceASTCDecodeFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72631. template <> struct StructExtends<PhysicalDeviceAccelerationStructureFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72632. template <> struct StructExtends<PhysicalDeviceAccelerationStructureFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
  72633. template <> struct StructExtends<PhysicalDeviceAccelerationStructurePropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
  72634. template <> struct StructExtends<PhysicalDeviceBlendOperationAdvancedFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72635. template <> struct StructExtends<PhysicalDeviceBlendOperationAdvancedFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72636. template <> struct StructExtends<PhysicalDeviceBlendOperationAdvancedPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
  72637. template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72638. template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeatures, DeviceCreateInfo>{ enum { value = true }; };
  72639. template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72640. template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72641. template <> struct StructExtends<PhysicalDeviceCoherentMemoryFeaturesAMD, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72642. template <> struct StructExtends<PhysicalDeviceCoherentMemoryFeaturesAMD, DeviceCreateInfo>{ enum { value = true }; };
  72643. template <> struct StructExtends<PhysicalDeviceComputeShaderDerivativesFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72644. template <> struct StructExtends<PhysicalDeviceComputeShaderDerivativesFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
  72645. template <> struct StructExtends<PhysicalDeviceConditionalRenderingFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72646. template <> struct StructExtends<PhysicalDeviceConditionalRenderingFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72647. template <> struct StructExtends<PhysicalDeviceConservativeRasterizationPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
  72648. template <> struct StructExtends<PhysicalDeviceCooperativeMatrixFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72649. template <> struct StructExtends<PhysicalDeviceCooperativeMatrixFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
  72650. template <> struct StructExtends<PhysicalDeviceCooperativeMatrixPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
  72651. template <> struct StructExtends<PhysicalDeviceCornerSampledImageFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72652. template <> struct StructExtends<PhysicalDeviceCornerSampledImageFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
  72653. template <> struct StructExtends<PhysicalDeviceCoverageReductionModeFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72654. template <> struct StructExtends<PhysicalDeviceCoverageReductionModeFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
  72655. template <> struct StructExtends<PhysicalDeviceCustomBorderColorFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72656. template <> struct StructExtends<PhysicalDeviceCustomBorderColorFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72657. template <> struct StructExtends<PhysicalDeviceCustomBorderColorPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
  72658. template <> struct StructExtends<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72659. template <> struct StructExtends<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
  72660. template <> struct StructExtends<PhysicalDeviceDepthClipEnableFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72661. template <> struct StructExtends<PhysicalDeviceDepthClipEnableFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72662. template <> struct StructExtends<PhysicalDeviceDepthStencilResolveProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
  72663. template <> struct StructExtends<PhysicalDeviceDescriptorIndexingFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72664. template <> struct StructExtends<PhysicalDeviceDescriptorIndexingFeatures, DeviceCreateInfo>{ enum { value = true }; };
  72665. template <> struct StructExtends<PhysicalDeviceDescriptorIndexingProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
  72666. template <> struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72667. template <> struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
  72668. template <> struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
  72669. template <> struct StructExtends<PhysicalDeviceDeviceMemoryReportFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72670. template <> struct StructExtends<PhysicalDeviceDeviceMemoryReportFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72671. template <> struct StructExtends<PhysicalDeviceDiagnosticsConfigFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72672. template <> struct StructExtends<PhysicalDeviceDiagnosticsConfigFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
  72673. template <> struct StructExtends<PhysicalDeviceDiscardRectanglePropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
  72674. template <> struct StructExtends<PhysicalDeviceDriverProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
  72675. template <> struct StructExtends<PhysicalDeviceExclusiveScissorFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72676. template <> struct StructExtends<PhysicalDeviceExclusiveScissorFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
  72677. template <> struct StructExtends<PhysicalDeviceExtendedDynamicStateFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72678. template <> struct StructExtends<PhysicalDeviceExtendedDynamicStateFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72679. template <> struct StructExtends<PhysicalDeviceExternalImageFormatInfo, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
  72680. template <> struct StructExtends<PhysicalDeviceExternalMemoryHostPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
  72681. template <> struct StructExtends<PhysicalDeviceFeatures2, DeviceCreateInfo>{ enum { value = true }; };
  72682. template <> struct StructExtends<PhysicalDeviceFloatControlsProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
  72683. template <> struct StructExtends<PhysicalDeviceFragmentDensityMap2FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72684. template <> struct StructExtends<PhysicalDeviceFragmentDensityMap2FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72685. template <> struct StructExtends<PhysicalDeviceFragmentDensityMap2PropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
  72686. template <> struct StructExtends<PhysicalDeviceFragmentDensityMapFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72687. template <> struct StructExtends<PhysicalDeviceFragmentDensityMapFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72688. template <> struct StructExtends<PhysicalDeviceFragmentDensityMapPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
  72689. template <> struct StructExtends<PhysicalDeviceFragmentShaderBarycentricFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72690. template <> struct StructExtends<PhysicalDeviceFragmentShaderBarycentricFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
  72691. template <> struct StructExtends<PhysicalDeviceFragmentShaderInterlockFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72692. template <> struct StructExtends<PhysicalDeviceFragmentShaderInterlockFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72693. template <> struct StructExtends<PhysicalDeviceFragmentShadingRateEnumsFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72694. template <> struct StructExtends<PhysicalDeviceFragmentShadingRateEnumsFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
  72695. template <> struct StructExtends<PhysicalDeviceFragmentShadingRateEnumsPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
  72696. template <> struct StructExtends<PhysicalDeviceFragmentShadingRateFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72697. template <> struct StructExtends<PhysicalDeviceFragmentShadingRateFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
  72698. template <> struct StructExtends<PhysicalDeviceFragmentShadingRatePropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
  72699. template <> struct StructExtends<PhysicalDeviceHostQueryResetFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72700. template <> struct StructExtends<PhysicalDeviceHostQueryResetFeatures, DeviceCreateInfo>{ enum { value = true }; };
  72701. template <> struct StructExtends<PhysicalDeviceIDProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
  72702. template <> struct StructExtends<PhysicalDeviceImageDrmFormatModifierInfoEXT, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
  72703. template <> struct StructExtends<PhysicalDeviceImageRobustnessFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72704. template <> struct StructExtends<PhysicalDeviceImageRobustnessFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72705. template <> struct StructExtends<PhysicalDeviceImageViewImageFormatInfoEXT, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
  72706. template <> struct StructExtends<PhysicalDeviceImagelessFramebufferFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72707. template <> struct StructExtends<PhysicalDeviceImagelessFramebufferFeatures, DeviceCreateInfo>{ enum { value = true }; };
  72708. template <> struct StructExtends<PhysicalDeviceIndexTypeUint8FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72709. template <> struct StructExtends<PhysicalDeviceIndexTypeUint8FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72710. template <> struct StructExtends<PhysicalDeviceInlineUniformBlockFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72711. template <> struct StructExtends<PhysicalDeviceInlineUniformBlockFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72712. template <> struct StructExtends<PhysicalDeviceInlineUniformBlockPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
  72713. template <> struct StructExtends<PhysicalDeviceLineRasterizationFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72714. template <> struct StructExtends<PhysicalDeviceLineRasterizationFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72715. template <> struct StructExtends<PhysicalDeviceLineRasterizationPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
  72716. template <> struct StructExtends<PhysicalDeviceMaintenance3Properties, PhysicalDeviceProperties2>{ enum { value = true }; };
  72717. template <> struct StructExtends<PhysicalDeviceMemoryBudgetPropertiesEXT, PhysicalDeviceMemoryProperties2>{ enum { value = true }; };
  72718. template <> struct StructExtends<PhysicalDeviceMemoryPriorityFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72719. template <> struct StructExtends<PhysicalDeviceMemoryPriorityFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72720. template <> struct StructExtends<PhysicalDeviceMeshShaderFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72721. template <> struct StructExtends<PhysicalDeviceMeshShaderFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
  72722. template <> struct StructExtends<PhysicalDeviceMeshShaderPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
  72723. template <> struct StructExtends<PhysicalDeviceMultiviewFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72724. template <> struct StructExtends<PhysicalDeviceMultiviewFeatures, DeviceCreateInfo>{ enum { value = true }; };
  72725. template <> struct StructExtends<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, PhysicalDeviceProperties2>{ enum { value = true }; };
  72726. template <> struct StructExtends<PhysicalDeviceMultiviewProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
  72727. template <> struct StructExtends<PhysicalDeviceMutableDescriptorTypeFeaturesVALVE, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72728. template <> struct StructExtends<PhysicalDeviceMutableDescriptorTypeFeaturesVALVE, DeviceCreateInfo>{ enum { value = true }; };
  72729. template <> struct StructExtends<PhysicalDevicePCIBusInfoPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
  72730. template <> struct StructExtends<PhysicalDevicePerformanceQueryFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72731. template <> struct StructExtends<PhysicalDevicePerformanceQueryFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
  72732. template <> struct StructExtends<PhysicalDevicePerformanceQueryPropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
  72733. template <> struct StructExtends<PhysicalDevicePipelineCreationCacheControlFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72734. template <> struct StructExtends<PhysicalDevicePipelineCreationCacheControlFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72735. template <> struct StructExtends<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72736. template <> struct StructExtends<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
  72737. template <> struct StructExtends<PhysicalDevicePointClippingProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
  72738. #ifdef VK_ENABLE_BETA_EXTENSIONS
  72739. template <> struct StructExtends<PhysicalDevicePortabilitySubsetFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72740. template <> struct StructExtends<PhysicalDevicePortabilitySubsetFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
  72741. #endif /*VK_ENABLE_BETA_EXTENSIONS*/
  72742. #ifdef VK_ENABLE_BETA_EXTENSIONS
  72743. template <> struct StructExtends<PhysicalDevicePortabilitySubsetPropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
  72744. #endif /*VK_ENABLE_BETA_EXTENSIONS*/
  72745. template <> struct StructExtends<PhysicalDevicePrivateDataFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72746. template <> struct StructExtends<PhysicalDevicePrivateDataFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72747. template <> struct StructExtends<PhysicalDeviceProtectedMemoryFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72748. template <> struct StructExtends<PhysicalDeviceProtectedMemoryFeatures, DeviceCreateInfo>{ enum { value = true }; };
  72749. template <> struct StructExtends<PhysicalDeviceProtectedMemoryProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
  72750. template <> struct StructExtends<PhysicalDevicePushDescriptorPropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
  72751. template <> struct StructExtends<PhysicalDeviceRayQueryFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72752. template <> struct StructExtends<PhysicalDeviceRayQueryFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
  72753. template <> struct StructExtends<PhysicalDeviceRayTracingPipelineFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72754. template <> struct StructExtends<PhysicalDeviceRayTracingPipelineFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
  72755. template <> struct StructExtends<PhysicalDeviceRayTracingPipelinePropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
  72756. template <> struct StructExtends<PhysicalDeviceRayTracingPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
  72757. template <> struct StructExtends<PhysicalDeviceRepresentativeFragmentTestFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72758. template <> struct StructExtends<PhysicalDeviceRepresentativeFragmentTestFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
  72759. template <> struct StructExtends<PhysicalDeviceRobustness2FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72760. template <> struct StructExtends<PhysicalDeviceRobustness2FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72761. template <> struct StructExtends<PhysicalDeviceRobustness2PropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
  72762. template <> struct StructExtends<PhysicalDeviceSampleLocationsPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
  72763. template <> struct StructExtends<PhysicalDeviceSamplerFilterMinmaxProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
  72764. template <> struct StructExtends<PhysicalDeviceSamplerYcbcrConversionFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72765. template <> struct StructExtends<PhysicalDeviceSamplerYcbcrConversionFeatures, DeviceCreateInfo>{ enum { value = true }; };
  72766. template <> struct StructExtends<PhysicalDeviceScalarBlockLayoutFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72767. template <> struct StructExtends<PhysicalDeviceScalarBlockLayoutFeatures, DeviceCreateInfo>{ enum { value = true }; };
  72768. template <> struct StructExtends<PhysicalDeviceSeparateDepthStencilLayoutsFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72769. template <> struct StructExtends<PhysicalDeviceSeparateDepthStencilLayoutsFeatures, DeviceCreateInfo>{ enum { value = true }; };
  72770. template <> struct StructExtends<PhysicalDeviceShaderAtomicFloatFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72771. template <> struct StructExtends<PhysicalDeviceShaderAtomicFloatFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72772. template <> struct StructExtends<PhysicalDeviceShaderAtomicInt64Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72773. template <> struct StructExtends<PhysicalDeviceShaderAtomicInt64Features, DeviceCreateInfo>{ enum { value = true }; };
  72774. template <> struct StructExtends<PhysicalDeviceShaderClockFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72775. template <> struct StructExtends<PhysicalDeviceShaderClockFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
  72776. template <> struct StructExtends<PhysicalDeviceShaderCoreProperties2AMD, PhysicalDeviceProperties2>{ enum { value = true }; };
  72777. template <> struct StructExtends<PhysicalDeviceShaderCorePropertiesAMD, PhysicalDeviceProperties2>{ enum { value = true }; };
  72778. template <> struct StructExtends<PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72779. template <> struct StructExtends<PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72780. template <> struct StructExtends<PhysicalDeviceShaderDrawParametersFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72781. template <> struct StructExtends<PhysicalDeviceShaderDrawParametersFeatures, DeviceCreateInfo>{ enum { value = true }; };
  72782. template <> struct StructExtends<PhysicalDeviceShaderFloat16Int8Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72783. template <> struct StructExtends<PhysicalDeviceShaderFloat16Int8Features, DeviceCreateInfo>{ enum { value = true }; };
  72784. template <> struct StructExtends<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72785. template <> struct StructExtends<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72786. template <> struct StructExtends<PhysicalDeviceShaderImageFootprintFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72787. template <> struct StructExtends<PhysicalDeviceShaderImageFootprintFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
  72788. template <> struct StructExtends<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72789. template <> struct StructExtends<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, DeviceCreateInfo>{ enum { value = true }; };
  72790. template <> struct StructExtends<PhysicalDeviceShaderSMBuiltinsFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72791. template <> struct StructExtends<PhysicalDeviceShaderSMBuiltinsFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
  72792. template <> struct StructExtends<PhysicalDeviceShaderSMBuiltinsPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
  72793. template <> struct StructExtends<PhysicalDeviceShaderSubgroupExtendedTypesFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72794. template <> struct StructExtends<PhysicalDeviceShaderSubgroupExtendedTypesFeatures, DeviceCreateInfo>{ enum { value = true }; };
  72795. template <> struct StructExtends<PhysicalDeviceShaderTerminateInvocationFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72796. template <> struct StructExtends<PhysicalDeviceShaderTerminateInvocationFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
  72797. template <> struct StructExtends<PhysicalDeviceShadingRateImageFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72798. template <> struct StructExtends<PhysicalDeviceShadingRateImageFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
  72799. template <> struct StructExtends<PhysicalDeviceShadingRateImagePropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
  72800. template <> struct StructExtends<PhysicalDeviceSubgroupProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
  72801. template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72802. template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72803. template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
  72804. template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72805. template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72806. template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
  72807. template <> struct StructExtends<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72808. template <> struct StructExtends<PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72809. template <> struct StructExtends<PhysicalDeviceTimelineSemaphoreFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72810. template <> struct StructExtends<PhysicalDeviceTimelineSemaphoreFeatures, DeviceCreateInfo>{ enum { value = true }; };
  72811. template <> struct StructExtends<PhysicalDeviceTimelineSemaphoreProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
  72812. template <> struct StructExtends<PhysicalDeviceTransformFeedbackFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72813. template <> struct StructExtends<PhysicalDeviceTransformFeedbackFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72814. template <> struct StructExtends<PhysicalDeviceTransformFeedbackPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
  72815. template <> struct StructExtends<PhysicalDeviceUniformBufferStandardLayoutFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72816. template <> struct StructExtends<PhysicalDeviceUniformBufferStandardLayoutFeatures, DeviceCreateInfo>{ enum { value = true }; };
  72817. template <> struct StructExtends<PhysicalDeviceVariablePointersFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72818. template <> struct StructExtends<PhysicalDeviceVariablePointersFeatures, DeviceCreateInfo>{ enum { value = true }; };
  72819. template <> struct StructExtends<PhysicalDeviceVertexAttributeDivisorFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72820. template <> struct StructExtends<PhysicalDeviceVertexAttributeDivisorFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72821. template <> struct StructExtends<PhysicalDeviceVertexAttributeDivisorPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
  72822. template <> struct StructExtends<PhysicalDeviceVulkan11Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72823. template <> struct StructExtends<PhysicalDeviceVulkan11Features, DeviceCreateInfo>{ enum { value = true }; };
  72824. template <> struct StructExtends<PhysicalDeviceVulkan11Properties, PhysicalDeviceProperties2>{ enum { value = true }; };
  72825. template <> struct StructExtends<PhysicalDeviceVulkan12Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72826. template <> struct StructExtends<PhysicalDeviceVulkan12Features, DeviceCreateInfo>{ enum { value = true }; };
  72827. template <> struct StructExtends<PhysicalDeviceVulkan12Properties, PhysicalDeviceProperties2>{ enum { value = true }; };
  72828. template <> struct StructExtends<PhysicalDeviceVulkanMemoryModelFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72829. template <> struct StructExtends<PhysicalDeviceVulkanMemoryModelFeatures, DeviceCreateInfo>{ enum { value = true }; };
  72830. template <> struct StructExtends<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72831. template <> struct StructExtends<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
  72832. template <> struct StructExtends<PhysicalDeviceYcbcrImageArraysFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72833. template <> struct StructExtends<PhysicalDeviceYcbcrImageArraysFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
  72834. template <> struct StructExtends<PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
  72835. template <> struct StructExtends<PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
  72836. template <> struct StructExtends<PipelineColorBlendAdvancedStateCreateInfoEXT, PipelineColorBlendStateCreateInfo>{ enum { value = true }; };
  72837. template <> struct StructExtends<PipelineCompilerControlCreateInfoAMD, GraphicsPipelineCreateInfo>{ enum { value = true }; };
  72838. template <> struct StructExtends<PipelineCompilerControlCreateInfoAMD, ComputePipelineCreateInfo>{ enum { value = true }; };
  72839. template <> struct StructExtends<PipelineCoverageModulationStateCreateInfoNV, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
  72840. template <> struct StructExtends<PipelineCoverageReductionStateCreateInfoNV, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
  72841. template <> struct StructExtends<PipelineCoverageToColorStateCreateInfoNV, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
  72842. template <> struct StructExtends<PipelineCreationFeedbackCreateInfoEXT, GraphicsPipelineCreateInfo>{ enum { value = true }; };
  72843. template <> struct StructExtends<PipelineCreationFeedbackCreateInfoEXT, ComputePipelineCreateInfo>{ enum { value = true }; };
  72844. template <> struct StructExtends<PipelineCreationFeedbackCreateInfoEXT, RayTracingPipelineCreateInfoNV>{ enum { value = true }; };
  72845. template <> struct StructExtends<PipelineCreationFeedbackCreateInfoEXT, RayTracingPipelineCreateInfoKHR>{ enum { value = true }; };
  72846. template <> struct StructExtends<PipelineDiscardRectangleStateCreateInfoEXT, GraphicsPipelineCreateInfo>{ enum { value = true }; };
  72847. template <> struct StructExtends<PipelineFragmentShadingRateEnumStateCreateInfoNV, GraphicsPipelineCreateInfo>{ enum { value = true }; };
  72848. template <> struct StructExtends<PipelineFragmentShadingRateStateCreateInfoKHR, GraphicsPipelineCreateInfo>{ enum { value = true }; };
  72849. template <> struct StructExtends<PipelineRasterizationConservativeStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
  72850. template <> struct StructExtends<PipelineRasterizationDepthClipStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
  72851. template <> struct StructExtends<PipelineRasterizationLineStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
  72852. template <> struct StructExtends<PipelineRasterizationStateRasterizationOrderAMD, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
  72853. template <> struct StructExtends<PipelineRasterizationStateStreamCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
  72854. template <> struct StructExtends<PipelineRepresentativeFragmentTestStateCreateInfoNV, GraphicsPipelineCreateInfo>{ enum { value = true }; };
  72855. template <> struct StructExtends<PipelineSampleLocationsStateCreateInfoEXT, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
  72856. template <> struct StructExtends<PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT, PipelineShaderStageCreateInfo>{ enum { value = true }; };
  72857. template <> struct StructExtends<PipelineTessellationDomainOriginStateCreateInfo, PipelineTessellationStateCreateInfo>{ enum { value = true }; };
  72858. template <> struct StructExtends<PipelineVertexInputDivisorStateCreateInfoEXT, PipelineVertexInputStateCreateInfo>{ enum { value = true }; };
  72859. template <> struct StructExtends<PipelineViewportCoarseSampleOrderStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
  72860. template <> struct StructExtends<PipelineViewportExclusiveScissorStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
  72861. template <> struct StructExtends<PipelineViewportShadingRateImageStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
  72862. template <> struct StructExtends<PipelineViewportSwizzleStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
  72863. template <> struct StructExtends<PipelineViewportWScalingStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
  72864. #ifdef VK_USE_PLATFORM_GGP
  72865. template <> struct StructExtends<PresentFrameTokenGGP, PresentInfoKHR>{ enum { value = true }; };
  72866. #endif /*VK_USE_PLATFORM_GGP*/
  72867. template <> struct StructExtends<PresentRegionsKHR, PresentInfoKHR>{ enum { value = true }; };
  72868. template <> struct StructExtends<PresentTimesInfoGOOGLE, PresentInfoKHR>{ enum { value = true }; };
  72869. template <> struct StructExtends<ProtectedSubmitInfo, SubmitInfo>{ enum { value = true }; };
  72870. template <> struct StructExtends<QueryPoolPerformanceCreateInfoKHR, QueryPoolCreateInfo>{ enum { value = true }; };
  72871. template <> struct StructExtends<QueryPoolPerformanceQueryCreateInfoINTEL, QueryPoolCreateInfo>{ enum { value = true }; };
  72872. template <> struct StructExtends<QueueFamilyCheckpointPropertiesNV, QueueFamilyProperties2>{ enum { value = true }; };
  72873. template <> struct StructExtends<RenderPassAttachmentBeginInfo, RenderPassBeginInfo>{ enum { value = true }; };
  72874. template <> struct StructExtends<RenderPassFragmentDensityMapCreateInfoEXT, RenderPassCreateInfo>{ enum { value = true }; };
  72875. template <> struct StructExtends<RenderPassFragmentDensityMapCreateInfoEXT, RenderPassCreateInfo2>{ enum { value = true }; };
  72876. template <> struct StructExtends<RenderPassInputAttachmentAspectCreateInfo, RenderPassCreateInfo>{ enum { value = true }; };
  72877. template <> struct StructExtends<RenderPassMultiviewCreateInfo, RenderPassCreateInfo>{ enum { value = true }; };
  72878. template <> struct StructExtends<RenderPassSampleLocationsBeginInfoEXT, RenderPassBeginInfo>{ enum { value = true }; };
  72879. template <> struct StructExtends<RenderPassTransformBeginInfoQCOM, RenderPassBeginInfo>{ enum { value = true }; };
  72880. template <> struct StructExtends<SampleLocationsInfoEXT, ImageMemoryBarrier>{ enum { value = true }; };
  72881. template <> struct StructExtends<SamplerCustomBorderColorCreateInfoEXT, SamplerCreateInfo>{ enum { value = true }; };
  72882. template <> struct StructExtends<SamplerReductionModeCreateInfo, SamplerCreateInfo>{ enum { value = true }; };
  72883. template <> struct StructExtends<SamplerYcbcrConversionImageFormatProperties, ImageFormatProperties2>{ enum { value = true }; };
  72884. template <> struct StructExtends<SamplerYcbcrConversionInfo, SamplerCreateInfo>{ enum { value = true }; };
  72885. template <> struct StructExtends<SamplerYcbcrConversionInfo, ImageViewCreateInfo>{ enum { value = true }; };
  72886. template <> struct StructExtends<SemaphoreTypeCreateInfo, SemaphoreCreateInfo>{ enum { value = true }; };
  72887. template <> struct StructExtends<SemaphoreTypeCreateInfo, PhysicalDeviceExternalSemaphoreInfo>{ enum { value = true }; };
  72888. template <> struct StructExtends<ShaderModuleValidationCacheCreateInfoEXT, ShaderModuleCreateInfo>{ enum { value = true }; };
  72889. template <> struct StructExtends<SharedPresentSurfaceCapabilitiesKHR, SurfaceCapabilities2KHR>{ enum { value = true }; };
  72890. template <> struct StructExtends<SubpassDescriptionDepthStencilResolve, SubpassDescription2>{ enum { value = true }; };
  72891. #ifdef VK_USE_PLATFORM_WIN32_KHR
  72892. template <> struct StructExtends<SurfaceCapabilitiesFullScreenExclusiveEXT, SurfaceCapabilities2KHR>{ enum { value = true }; };
  72893. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  72894. #ifdef VK_USE_PLATFORM_WIN32_KHR
  72895. template <> struct StructExtends<SurfaceFullScreenExclusiveInfoEXT, PhysicalDeviceSurfaceInfo2KHR>{ enum { value = true }; };
  72896. template <> struct StructExtends<SurfaceFullScreenExclusiveInfoEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
  72897. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  72898. #ifdef VK_USE_PLATFORM_WIN32_KHR
  72899. template <> struct StructExtends<SurfaceFullScreenExclusiveWin32InfoEXT, PhysicalDeviceSurfaceInfo2KHR>{ enum { value = true }; };
  72900. template <> struct StructExtends<SurfaceFullScreenExclusiveWin32InfoEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
  72901. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  72902. template <> struct StructExtends<SurfaceProtectedCapabilitiesKHR, SurfaceCapabilities2KHR>{ enum { value = true }; };
  72903. template <> struct StructExtends<SwapchainCounterCreateInfoEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
  72904. template <> struct StructExtends<SwapchainDisplayNativeHdrCreateInfoAMD, SwapchainCreateInfoKHR>{ enum { value = true }; };
  72905. template <> struct StructExtends<TextureLODGatherFormatPropertiesAMD, ImageFormatProperties2>{ enum { value = true }; };
  72906. template <> struct StructExtends<TimelineSemaphoreSubmitInfo, SubmitInfo>{ enum { value = true }; };
  72907. template <> struct StructExtends<TimelineSemaphoreSubmitInfo, BindSparseInfo>{ enum { value = true }; };
  72908. template <> struct StructExtends<ValidationFeaturesEXT, InstanceCreateInfo>{ enum { value = true }; };
  72909. template <> struct StructExtends<ValidationFlagsEXT, InstanceCreateInfo>{ enum { value = true }; };
  72910. #ifdef VK_USE_PLATFORM_WIN32_KHR
  72911. template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoKHR, SubmitInfo>{ enum { value = true }; };
  72912. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  72913. #ifdef VK_USE_PLATFORM_WIN32_KHR
  72914. template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoNV, SubmitInfo>{ enum { value = true }; };
  72915. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  72916. template <> struct StructExtends<WriteDescriptorSetAccelerationStructureKHR, WriteDescriptorSet>{ enum { value = true }; };
  72917. template <> struct StructExtends<WriteDescriptorSetAccelerationStructureNV, WriteDescriptorSet>{ enum { value = true }; };
  72918. template <> struct StructExtends<WriteDescriptorSetInlineUniformBlockEXT, WriteDescriptorSet>{ enum { value = true }; };
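  // The StructExtends specializations above record, at compile time, which structures may legally
  // appear in the pNext chain of another structure; StructureChain consults them so that an
  // invalid chain fails to compile. Illustrative sketch only (not part of the generated header;
  // assumes a valid vk::PhysicalDevice named physicalDevice and the enhanced-mode helpers):
  //
  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
  //                                            vk::PhysicalDeviceVulkan12Features>();
  //   bool hasTimelineSemaphores = chain.get<vk::PhysicalDeviceVulkan12Features>().timelineSemaphore;
  //
  // Chaining a structure for which no StructExtends specialization names the parent is rejected
  // at compile time.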
#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
  class DynamicLoader
  {
  public:
#  ifdef VULKAN_HPP_NO_EXCEPTIONS
    DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT
#  else
    DynamicLoader( std::string const & vulkanLibraryName = {} )
#  endif
    {
      if ( !vulkanLibraryName.empty() )
      {
#  if defined( __linux__ ) || defined( __APPLE__ )
        m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL );
#  elif defined( _WIN32 )
        m_library = ::LoadLibraryA( vulkanLibraryName.c_str() );
#  else
#    error unsupported platform
#  endif
      }
      else
      {
#  if defined( __linux__ )
        m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL );
        if ( m_library == nullptr )
        {
          m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL );
        }
#  elif defined( __APPLE__ )
        m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL );
#  elif defined( _WIN32 )
        m_library = ::LoadLibraryA( "vulkan-1.dll" );
#  else
#    error unsupported platform
#  endif
      }
#  ifndef VULKAN_HPP_NO_EXCEPTIONS
      if ( m_library == nullptr )
      {
        // NOTE: there should be an InitializationFailedError, but MSVC insists that the symbol does not exist within the scope of this function.
        throw std::runtime_error( "Failed to load vulkan library!" );
      }
#  endif
    }

    DynamicLoader( DynamicLoader const & ) = delete;

    DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT : m_library( other.m_library )
    {
      other.m_library = nullptr;
    }

    DynamicLoader & operator=( DynamicLoader const & ) = delete;

    DynamicLoader & operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT
    {
      std::swap( m_library, other.m_library );
      return *this;
    }

    ~DynamicLoader() VULKAN_HPP_NOEXCEPT
    {
      if ( m_library )
      {
#  if defined( __linux__ ) || defined( __APPLE__ )
        dlclose( m_library );
#  elif defined( _WIN32 )
        ::FreeLibrary( m_library );
#  else
#    error unsupported platform
#  endif
      }
    }

    template <typename T>
    T getProcAddress( const char * function ) const VULKAN_HPP_NOEXCEPT
    {
#  if defined( __linux__ ) || defined( __APPLE__ )
      return (T)dlsym( m_library, function );
#  elif defined( _WIN32 )
      return (T)::GetProcAddress( m_library, function );
#  else
#    error unsupported platform
#  endif
    }

    bool success() const VULKAN_HPP_NOEXCEPT { return m_library != nullptr; }

  private:
#  if defined( __linux__ ) || defined( __APPLE__ )
    void * m_library;
#  elif defined( _WIN32 )
    ::HINSTANCE m_library;
#  else
#    error unsupported platform
#  endif
  };
#endif
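  // Usage sketch for DynamicLoader (illustrative, not part of the generated interface; only
  // available when VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL is set): load the Vulkan runtime,
  // resolve vkGetInstanceProcAddr from it, and hand that to a dynamic dispatcher.
  //
  //   vk::DynamicLoader dl;   // tries libvulkan.so / libvulkan.dylib / vulkan-1.dll by default
  //   if ( dl.success() )
  //   {
  //     auto getInstanceProcAddr = dl.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
  //     vk::DispatchLoaderDynamic dispatcher( getInstanceProcAddr );
  //   }
  //
  // With exceptions enabled the constructor throws on failure, so the success() check mainly
  // matters when VULKAN_HPP_NO_EXCEPTIONS is defined.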
  73009. class DispatchLoaderDynamic
  73010. {
  73011. public:
  73012. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73013. PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0;
  73014. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  73015. PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0;
  73016. PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0;
  73017. PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0;
  73018. PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0;
  73019. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73020. PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0;
  73021. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  73022. #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
  73023. PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0;
  73024. #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
  73025. PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0;
  73026. PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0;
  73027. PFN_vkAllocateMemory vkAllocateMemory = 0;
  73028. PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0;
  73029. PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0;
  73030. PFN_vkBindBufferMemory vkBindBufferMemory = 0;
  73031. PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0;
  73032. PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0;
  73033. PFN_vkBindImageMemory vkBindImageMemory = 0;
  73034. PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0;
  73035. PFN_vkBindImageMemory2 vkBindImageMemory2 = 0;
  73036. PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0;
  73037. PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0;
  73038. PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0;
  73039. PFN_vkCmdBeginQuery vkCmdBeginQuery = 0;
  73040. PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0;
  73041. PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0;
  73042. PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0;
  73043. PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0;
  73044. PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0;
  73045. PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0;
  73046. PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0;
  73047. PFN_vkCmdBindPipeline vkCmdBindPipeline = 0;
  73048. PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0;
  73049. PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0;
  73050. PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0;
  73051. PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0;
  73052. PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0;
  73053. PFN_vkCmdBlitImage vkCmdBlitImage = 0;
  73054. PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0;
  73055. PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0;
  73056. PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0;
  73057. PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0;
  73058. PFN_vkCmdClearAttachments vkCmdClearAttachments = 0;
  73059. PFN_vkCmdClearColorImage vkCmdClearColorImage = 0;
  73060. PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0;
  73061. PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0;
  73062. PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0;
  73063. PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0;
  73064. PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0;
  73065. PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0;
  73066. PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0;
  73067. PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0;
  73068. PFN_vkCmdCopyImage vkCmdCopyImage = 0;
  73069. PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0;
  73070. PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0;
  73071. PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0;
  73072. PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0;
  73073. PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0;
  73074. PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0;
  73075. PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0;
  73076. PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0;
  73077. PFN_vkCmdDispatch vkCmdDispatch = 0;
  73078. PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0;
  73079. PFN_vkCmdDispatchBase vkCmdDispatchBase = 0;
  73080. PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0;
  73081. PFN_vkCmdDraw vkCmdDraw = 0;
  73082. PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0;
  73083. PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0;
  73084. PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0;
  73085. PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0;
  73086. PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0;
  73087. PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0;
  73088. PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0;
  73089. PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0;
  73090. PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0;
  73091. PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0;
  73092. PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0;
  73093. PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0;
  73094. PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0;
  73095. PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0;
  73096. PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0;
  73097. PFN_vkCmdEndQuery vkCmdEndQuery = 0;
  73098. PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0;
  73099. PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0;
  73100. PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0;
  73101. PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0;
  73102. PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0;
  73103. PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0;
  73104. PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0;
  73105. PFN_vkCmdFillBuffer vkCmdFillBuffer = 0;
  73106. PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0;
  73107. PFN_vkCmdNextSubpass vkCmdNextSubpass = 0;
  73108. PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0;
  73109. PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0;
  73110. PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0;
  73111. PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0;
  73112. PFN_vkCmdPushConstants vkCmdPushConstants = 0;
  73113. PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0;
  73114. PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0;
  73115. PFN_vkCmdResetEvent vkCmdResetEvent = 0;
  73116. PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0;
  73117. PFN_vkCmdResolveImage vkCmdResolveImage = 0;
  73118. PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0;
  73119. PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0;
  73120. PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0;
  73121. PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0;
  73122. PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0;
  73123. PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0;
  73124. PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0;
  73125. PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0;
  73126. PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0;
  73127. PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0;
  73128. PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0;
  73129. PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0;
  73130. PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0;
  73131. PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0;
  73132. PFN_vkCmdSetEvent vkCmdSetEvent = 0;
  73133. PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0;
  73134. PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0;
  73135. PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0;
  73136. PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0;
  73137. PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0;
  73138. PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0;
  73139. PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0;
  73140. PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0;
  73141. PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0;
  73142. PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0;
  73143. PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0;
  73144. PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0;
  73145. PFN_vkCmdSetScissor vkCmdSetScissor = 0;
  73146. PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0;
  73147. PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0;
  73148. PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0;
  73149. PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0;
  73150. PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0;
  73151. PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0;
  73152. PFN_vkCmdSetViewport vkCmdSetViewport = 0;
  73153. PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0;
  73154. PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0;
  73155. PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0;
  73156. PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0;
  73157. PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0;
  73158. PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0;
  73159. PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0;
  73160. PFN_vkCmdWaitEvents vkCmdWaitEvents = 0;
  73161. PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0;
  73162. PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0;
  73163. PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0;
  73164. PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0;
  73165. PFN_vkCompileDeferredNV vkCompileDeferredNV = 0;
  73166. PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0;
  73167. PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0;
  73168. PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0;
  73169. PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0;
  73170. PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0;
  73171. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  73172. PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0;
  73173. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  73174. PFN_vkCreateBuffer vkCreateBuffer = 0;
  73175. PFN_vkCreateBufferView vkCreateBufferView = 0;
  73176. PFN_vkCreateCommandPool vkCreateCommandPool = 0;
  73177. PFN_vkCreateComputePipelines vkCreateComputePipelines = 0;
  73178. PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0;
  73179. PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0;
  73180. PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0;
  73181. PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0;
  73182. PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0;
  73183. PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0;
  73184. PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0;
  73185. PFN_vkCreateDevice vkCreateDevice = 0;
  73186. #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
  73187. PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0;
  73188. #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
  73189. PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0;
  73190. PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0;
  73191. PFN_vkCreateEvent vkCreateEvent = 0;
  73192. PFN_vkCreateFence vkCreateFence = 0;
  73193. PFN_vkCreateFramebuffer vkCreateFramebuffer = 0;
  73194. PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0;
  73195. PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0;
  73196. #ifdef VK_USE_PLATFORM_IOS_MVK
  73197. PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0;
  73198. #endif /*VK_USE_PLATFORM_IOS_MVK*/
  73199. PFN_vkCreateImage vkCreateImage = 0;
  73200. #ifdef VK_USE_PLATFORM_FUCHSIA
  73201. PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0;
  73202. #endif /*VK_USE_PLATFORM_FUCHSIA*/
  73203. PFN_vkCreateImageView vkCreateImageView = 0;
  73204. PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0;
  73205. PFN_vkCreateInstance vkCreateInstance = 0;
  73206. #ifdef VK_USE_PLATFORM_MACOS_MVK
  73207. PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0;
  73208. #endif /*VK_USE_PLATFORM_MACOS_MVK*/
  73209. #ifdef VK_USE_PLATFORM_METAL_EXT
  73210. PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0;
  73211. #endif /*VK_USE_PLATFORM_METAL_EXT*/
  73212. PFN_vkCreatePipelineCache vkCreatePipelineCache = 0;
  73213. PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0;
  73214. PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0;
  73215. PFN_vkCreateQueryPool vkCreateQueryPool = 0;
  73216. PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0;
  73217. PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0;
  73218. PFN_vkCreateRenderPass vkCreateRenderPass = 0;
  73219. PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0;
  73220. PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0;
  73221. PFN_vkCreateSampler vkCreateSampler = 0;
  73222. PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0;
  73223. PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0;
  73224. PFN_vkCreateSemaphore vkCreateSemaphore = 0;
  73225. PFN_vkCreateShaderModule vkCreateShaderModule = 0;
  73226. PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0;
  73227. #ifdef VK_USE_PLATFORM_GGP
  73228. PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0;
  73229. #endif /*VK_USE_PLATFORM_GGP*/
  73230. PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0;
  73231. PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0;
  73232. #ifdef VK_USE_PLATFORM_VI_NN
  73233. PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0;
  73234. #endif /*VK_USE_PLATFORM_VI_NN*/
  73235. #ifdef VK_USE_PLATFORM_WAYLAND_KHR
  73236. PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0;
  73237. #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
  73238. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73239. PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0;
  73240. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  73241. #ifdef VK_USE_PLATFORM_XCB_KHR
  73242. PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0;
  73243. #endif /*VK_USE_PLATFORM_XCB_KHR*/
  73244. #ifdef VK_USE_PLATFORM_XLIB_KHR
  73245. PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0;
  73246. #endif /*VK_USE_PLATFORM_XLIB_KHR*/
  73247. PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0;
  73248. PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0;
  73249. PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0;
  73250. PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0;
  73251. PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0;
  73252. PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0;
  73253. PFN_vkDestroyBuffer vkDestroyBuffer = 0;
  73254. PFN_vkDestroyBufferView vkDestroyBufferView = 0;
  73255. PFN_vkDestroyCommandPool vkDestroyCommandPool = 0;
  73256. PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0;
  73257. PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0;
  73258. PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0;
  73259. PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0;
  73260. PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0;
  73261. PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0;
  73262. PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0;
  73263. PFN_vkDestroyDevice vkDestroyDevice = 0;
  73264. PFN_vkDestroyEvent vkDestroyEvent = 0;
  73265. PFN_vkDestroyFence vkDestroyFence = 0;
  73266. PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0;
  73267. PFN_vkDestroyImage vkDestroyImage = 0;
  73268. PFN_vkDestroyImageView vkDestroyImageView = 0;
  73269. PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0;
  73270. PFN_vkDestroyInstance vkDestroyInstance = 0;
  73271. PFN_vkDestroyPipeline vkDestroyPipeline = 0;
  73272. PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0;
  73273. PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0;
  73274. PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0;
  73275. PFN_vkDestroyQueryPool vkDestroyQueryPool = 0;
  73276. PFN_vkDestroyRenderPass vkDestroyRenderPass = 0;
  73277. PFN_vkDestroySampler vkDestroySampler = 0;
  73278. PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0;
  73279. PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0;
  73280. PFN_vkDestroySemaphore vkDestroySemaphore = 0;
  73281. PFN_vkDestroyShaderModule vkDestroyShaderModule = 0;
  73282. PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0;
  73283. PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0;
  73284. PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0;
  73285. PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0;
  73286. PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0;
  73287. PFN_vkEndCommandBuffer vkEndCommandBuffer = 0;
  73288. PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0;
  73289. PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0;
  73290. PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0;
  73291. PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0;
  73292. PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0;
  73293. PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0;
  73294. PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0;
  73295. PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0;
  73296. PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0;
  73297. PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0;
  73298. PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0;
  73299. PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0;
  73300. PFN_vkFreeMemory vkFreeMemory = 0;
  73301. PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0;
  73302. PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0;
  73303. PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0;
  73304. PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0;
  73305. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  73306. PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0;
  73307. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  73308. PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0;
  73309. PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0;
  73310. PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0;
  73311. PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0;
  73312. PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0;
  73313. PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0;
  73314. PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0;
  73315. PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0;
  73316. PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0;
  73317. PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0;
  73318. PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0;
  73319. PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0;
  73320. PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0;
  73321. PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0;
  73322. PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0;
  73323. PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0;
  73324. PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0;
  73325. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73326. PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0;
  73327. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  73328. PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0;
  73329. PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0;
  73330. PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0;
  73331. PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0;
  73332. PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
  73333. PFN_vkGetDeviceQueue vkGetDeviceQueue = 0;
  73334. PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0;
  73335. PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0;
  73336. PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0;
  73337. PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0;
  73338. PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0;
  73339. PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0;
  73340. PFN_vkGetEventStatus vkGetEventStatus = 0;
  73341. PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0;
  73342. PFN_vkGetFenceStatus vkGetFenceStatus = 0;
  73343. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73344. PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0;
  73345. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  73346. PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0;
  73347. PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0;
  73348. PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0;
  73349. PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0;
  73350. PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0;
  73351. PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0;
  73352. PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0;
  73353. PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0;
  73354. PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0;
  73355. PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0;
  73356. PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0;
  73357. PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
  73358. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  73359. PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0;
  73360. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  73361. PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0;
  73362. PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0;
  73363. PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0;
  73364. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73365. PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0;
  73366. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  73367. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73368. PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0;
  73369. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  73370. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73371. PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0;
  73372. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  73373. PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0;
  73374. PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0;
  73375. PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0;
  73376. PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0;
  73377. #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
  73378. PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0;
  73379. #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
  73380. PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0;
  73381. PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0;
  73382. PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0;
  73383. PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0;
  73384. PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0;
  73385. PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0;
  73386. PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0;
  73387. PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0;
  73388. PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0;
  73389. PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0;
  73390. PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0;
  73391. PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0;
  73392. PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0;
  73393. PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0;
  73394. PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0;
  73395. PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0;
  73396. PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0;
  73397. PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0;
  73398. PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0;
  73399. PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0;
  73400. PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0;
  73401. PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0;
  73402. PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0;
  73403. PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0;
  73404. PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0;
  73405. PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0;
  73406. PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0;
  73407. PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0;
  73408. PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0;
  73409. PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0;
  73410. PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0;
  73411. PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0;
  73412. PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0;
  73413. PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0;
  73414. PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0;
  73415. PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0;
  73416. PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0;
  73417. PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0;
  73418. PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0;
  73419. PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0;
  73420. PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0;
  73421. PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0;
  73422. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73423. PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0;
  73424. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  73425. PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0;
  73426. PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0;
  73427. PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0;
  73428. #ifdef VK_USE_PLATFORM_WAYLAND_KHR
  73429. PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0;
  73430. #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
  73431. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73432. PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0;
  73433. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  73434. #ifdef VK_USE_PLATFORM_XCB_KHR
  73435. PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0;
  73436. #endif /*VK_USE_PLATFORM_XCB_KHR*/
  73437. #ifdef VK_USE_PLATFORM_XLIB_KHR
  73438. PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0;
  73439. #endif /*VK_USE_PLATFORM_XLIB_KHR*/
  73440. PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0;
  73441. PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0;
  73442. PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0;
  73443. PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0;
  73444. PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0;
  73445. PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0;
  73446. PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0;
  73447. #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
  73448. PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0;
  73449. #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
  73450. PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0;
  73451. PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0;
  73452. PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0;
  73453. PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0;
  73454. PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0;
  73455. PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0;
  73456. PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0;
  73457. PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0;
  73458. PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0;
  73459. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73460. PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0;
  73461. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  73462. PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0;
  73463. PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0;
  73464. PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0;
  73465. PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0;
  73466. PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0;
  73467. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73468. PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0;
  73469. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  73470. PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0;
  73471. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73472. PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0;
  73473. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  73474. PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0;
  73475. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73476. PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0;
  73477. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  73478. PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0;
  73479. PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0;
  73480. PFN_vkMapMemory vkMapMemory = 0;
  73481. PFN_vkMergePipelineCaches vkMergePipelineCaches = 0;
  73482. PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0;
  73483. PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0;
  73484. PFN_vkQueueBindSparse vkQueueBindSparse = 0;
  73485. PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0;
  73486. PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0;
  73487. PFN_vkQueuePresentKHR vkQueuePresentKHR = 0;
  73488. PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0;
  73489. PFN_vkQueueSubmit vkQueueSubmit = 0;
  73490. PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
  73491. PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0;
  73492. PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0;
  73493. PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0;
  73494. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73495. PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0;
  73496. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  73497. PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0;
  73498. PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0;
  73499. PFN_vkResetCommandBuffer vkResetCommandBuffer = 0;
  73500. PFN_vkResetCommandPool vkResetCommandPool = 0;
  73501. PFN_vkResetDescriptorPool vkResetDescriptorPool = 0;
  73502. PFN_vkResetEvent vkResetEvent = 0;
  73503. PFN_vkResetFences vkResetFences = 0;
  73504. PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0;
  73505. PFN_vkResetQueryPool vkResetQueryPool = 0;
  73506. PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0;
  73507. PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0;
  73508. PFN_vkSetEvent vkSetEvent = 0;
  73509. PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0;
  73510. PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0;
  73511. PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0;
  73512. PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0;
  73513. PFN_vkSignalSemaphore vkSignalSemaphore = 0;
  73514. PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0;
  73515. PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0;
  73516. PFN_vkTrimCommandPool vkTrimCommandPool = 0;
  73517. PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0;
  73518. PFN_vkUnmapMemory vkUnmapMemory = 0;
  73519. PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0;
  73520. PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0;
  73521. PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0;
  73522. PFN_vkWaitForFences vkWaitForFences = 0;
  73523. PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0;
  73524. PFN_vkWaitSemaphores vkWaitSemaphores = 0;
  73525. PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0;
  public:
    DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default;

#if !defined( VK_NO_PROTOTYPES )
    // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
    template <typename DynamicLoader>
    void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device, DynamicLoader const & dl ) VULKAN_HPP_NOEXCEPT
    {
      PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
      PFN_vkGetDeviceProcAddr   getDeviceProcAddr   = dl.template getProcAddress<PFN_vkGetDeviceProcAddr>( "vkGetDeviceProcAddr" );
      init( static_cast<VkInstance>( instance ), getInstanceProcAddr, static_cast<VkDevice>( device ), device ? getDeviceProcAddr : nullptr );
    }

    // This interface is designed to be used for per-device function pointers in combination with a linked vulkan library.
    template <typename DynamicLoader
#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
              = VULKAN_HPP_NAMESPACE::DynamicLoader
#endif
              >
    void init( VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device ) VULKAN_HPP_NOEXCEPT
    {
      static DynamicLoader dl;
      init( instance, device, dl );
    }
#endif  // !defined( VK_NO_PROTOTYPES )

    DispatchLoaderDynamic( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
    {
      init( getInstanceProcAddr );
    }

    void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( getInstanceProcAddr );
      vkGetInstanceProcAddr = getInstanceProcAddr;
      vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) );
      vkEnumerateInstanceExtensionProperties = PFN_vkEnumerateInstanceExtensionProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) );
      vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) );
      vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) );
    }
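    // At this point only the global entry points above are resolved, so a minimal bootstrap can
    // query the loader before any instance exists. Illustrative sketch (assumes a dispatcher
    // initialized as above):
    //
    //   uint32_t apiVersion = VK_API_VERSION_1_0;
    //   if ( dispatcher.vkEnumerateInstanceVersion )
    //   {
    //     dispatcher.vkEnumerateInstanceVersion( &apiVersion );
    //   }
    //
    // vkEnumerateInstanceVersion is null on a pure Vulkan 1.0 loader, hence the null check.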
    // This interface does not require a linked vulkan library.
    DispatchLoaderDynamic( VkInstance instance, PFN_vkGetInstanceProcAddr getInstanceProcAddr, VkDevice device = VK_NULL_HANDLE, PFN_vkGetDeviceProcAddr getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT
    {
      init( instance, getInstanceProcAddr, device, getDeviceProcAddr );
    }

    // This interface does not require a linked vulkan library.
    void init( VkInstance instance, PFN_vkGetInstanceProcAddr getInstanceProcAddr, VkDevice device = VK_NULL_HANDLE, PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT
    {
      VULKAN_HPP_ASSERT( instance && getInstanceProcAddr );
      vkGetInstanceProcAddr = getInstanceProcAddr;
      init( VULKAN_HPP_NAMESPACE::Instance( instance ) );
      if ( device )
      {
        init( VULKAN_HPP_NAMESPACE::Device( device ) );
      }
    }
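    // Typical dynamic-dispatch flow (a sketch, assuming the instance and device handles are
    // created elsewhere with this dispatcher): resolve global functions first, instance-level
    // functions once the instance exists, and finally device-level functions to reduce dispatch
    // overhead.
    //
    //   vk::DispatchLoaderDynamic dispatcher( getInstanceProcAddr );   // global functions only
    //   vk::Instance instance = ...;                                   // created via the dispatcher
    //   dispatcher.init( instance );                                   // instance-level functions
    //   vk::Device device = ...;
    //   dispatcher.init( device );                                     // device-level functions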
  73577. void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT
  73578. {
  73579. VkInstance instance = static_cast<VkInstance>(instanceCpp);
#ifdef VK_USE_PLATFORM_WIN32_KHR
      vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
      vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) );
#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
#ifdef VK_USE_PLATFORM_ANDROID_KHR
      vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
      vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) );
      vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
      vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
  73592. #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
  73593. vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) );
  73594. #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
  73595. vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) );
  73596. vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
  73597. vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
  73598. #ifdef VK_USE_PLATFORM_IOS_MVK
  73599. vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) );
  73600. #endif /*VK_USE_PLATFORM_IOS_MVK*/
  73601. #ifdef VK_USE_PLATFORM_FUCHSIA
  73602. vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) );
  73603. #endif /*VK_USE_PLATFORM_FUCHSIA*/
  73604. #ifdef VK_USE_PLATFORM_MACOS_MVK
  73605. vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) );
  73606. #endif /*VK_USE_PLATFORM_MACOS_MVK*/
  73607. #ifdef VK_USE_PLATFORM_METAL_EXT
  73608. vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) );
  73609. #endif /*VK_USE_PLATFORM_METAL_EXT*/
  73610. #ifdef VK_USE_PLATFORM_GGP
  73611. vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) );
  73612. #endif /*VK_USE_PLATFORM_GGP*/
  73613. #ifdef VK_USE_PLATFORM_VI_NN
  73614. vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) );
  73615. #endif /*VK_USE_PLATFORM_VI_NN*/
  73616. #ifdef VK_USE_PLATFORM_WAYLAND_KHR
  73617. vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) );
  73618. #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
  73619. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73620. vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) );
  73621. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  73622. #ifdef VK_USE_PLATFORM_XCB_KHR
  73623. vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) );
  73624. #endif /*VK_USE_PLATFORM_XCB_KHR*/
  73625. #ifdef VK_USE_PLATFORM_XLIB_KHR
  73626. vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) );
  73627. #endif /*VK_USE_PLATFORM_XLIB_KHR*/
  73628. vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) );
  73629. vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) );
  73630. vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) );
  73631. vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
  73632. vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) );
  73633. vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) );
  73634. vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) );
      vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) );
      vkEnumeratePhysicalDeviceGroups    = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) );
      if ( !vkEnumeratePhysicalDeviceGroups )
        vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR;
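      // Note on the pattern above (and repeated below): commands promoted to core
      // Vulkan keep their extension-suffixed aliases. If the loader or driver predates
      // the core version, the unsuffixed lookup returns null and the core pointer is
      // filled from the extension entry point, so callers can always use the core name.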
  73638. vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) );
  73639. vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
  73640. vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) );
  73641. vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) );
  73642. vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) );
  73643. vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) );
  73644. vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) );
  73645. vkGetInstanceProcAddr = PFN_vkGetInstanceProcAddr( vkGetInstanceProcAddr( instance, "vkGetInstanceProcAddr" ) );
  73646. vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) );
  73647. vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) );
  73648. #ifdef VK_USE_PLATFORM_DIRECTFB_EXT
  73649. vkGetPhysicalDeviceDirectFBPresentationSupportEXT = PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) );
  73650. #endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
  73651. vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) );
  73652. vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) );
  73653. vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) );
  73654. vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) );
  73655. vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) );
  73656. vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) );
  73657. if ( !vkGetPhysicalDeviceExternalBufferProperties ) vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR;
  73658. vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) );
  73659. vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) );
  73660. if ( !vkGetPhysicalDeviceExternalFenceProperties ) vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR;
  73661. vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) );
  73662. vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) );
  73663. vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) );
  73664. if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR;
  73665. vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
  73666. vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) );
  73667. vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) );
  73668. if ( !vkGetPhysicalDeviceFeatures2 ) vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR;
  73669. vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
  73670. vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) );
  73671. vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) );
  73672. if ( !vkGetPhysicalDeviceFormatProperties2 ) vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR;
  73673. vkGetPhysicalDeviceFragmentShadingRatesKHR = PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) );
  73674. vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
  73675. vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) );
  73676. vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) );
  73677. if ( !vkGetPhysicalDeviceImageFormatProperties2 ) vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR;
  73678. vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
  73679. vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) );
  73680. vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) );
  73681. if ( !vkGetPhysicalDeviceMemoryProperties2 ) vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR;
  73682. vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) );
  73683. vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) );
  73684. vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
  73685. vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) );
  73686. vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) );
  73687. if ( !vkGetPhysicalDeviceProperties2 ) vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR;
  73688. vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) );
  73689. vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
  73690. vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) );
  73691. vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) );
  73692. if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR;
  73693. vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) );
  73694. vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) );
  73695. vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) );
  73696. if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR;
  73697. vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) );
  73698. vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) );
  73699. vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) );
  73700. vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) );
  73701. vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) );
  73702. vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) );
  73703. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73704. vkGetPhysicalDeviceSurfacePresentModes2EXT = PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) );
  73705. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  73706. vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) );
  73707. vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) );
  73708. vkGetPhysicalDeviceToolPropertiesEXT = PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) );
  73709. #ifdef VK_USE_PLATFORM_WAYLAND_KHR
  73710. vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) );
  73711. #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
  73712. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73713. vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) );
  73714. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  73715. #ifdef VK_USE_PLATFORM_XCB_KHR
  73716. vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) );
  73717. #endif /*VK_USE_PLATFORM_XCB_KHR*/
  73718. #ifdef VK_USE_PLATFORM_XLIB_KHR
  73719. vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) );
  73720. #endif /*VK_USE_PLATFORM_XLIB_KHR*/
  73721. #ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
  73722. vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) );
  73723. #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
  73724. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73725. vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) );
  73726. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
      vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) );
      vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) );
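      // From here on, device-level commands are also resolved through
      // vkGetInstanceProcAddr. These pointers go through the loader's dispatch
      // trampoline; calling init( VULKAN_HPP_NAMESPACE::Device ) afterwards
      // overwrites them with per-device pointers obtained via vkGetDeviceProcAddr.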
  73729. #ifdef VK_USE_PLATFORM_WIN32_KHR
  73730. vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) );
  73731. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  73732. vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) );
  73733. vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) );
  73734. vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) );
  73735. vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) );
  73736. vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) );
  73737. vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) );
  73738. vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) );
  73739. vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) );
  73740. vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) );
  73741. vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) );
  73742. vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) );
  73743. vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) );
  73744. if ( !vkBindBufferMemory2 ) vkBindBufferMemory2 = vkBindBufferMemory2KHR;
  73745. vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) );
  73746. vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) );
  73747. vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) );
  73748. if ( !vkBindImageMemory2 ) vkBindImageMemory2 = vkBindImageMemory2KHR;
  73749. vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructuresKHR" ) );
  73750. vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) );
  73751. vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) );
  73752. vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) );
  73753. vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) );
  73754. vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) );
  73755. vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) );
  73756. vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) );
  73757. if ( !vkCmdBeginRenderPass2 ) vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR;
  73758. vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) );
  73759. vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) );
  73760. vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) );
  73761. vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) );
  73762. vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) );
  73763. vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) );
  73764. vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) );
  73765. vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) );
  73766. vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) );
  73767. vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) );
  73768. vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2KHR" ) );
  73769. vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) );
  73770. vkCmdBuildAccelerationStructuresIndirectKHR = PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresIndirectKHR" ) );
  73771. vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresKHR" ) );
  73772. vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) );
  73773. vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) );
  73774. vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) );
  73775. vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) );
  73776. vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) );
  73777. vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
  73778. vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) );
  73779. vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) );
  73780. vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) );
  73781. vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) );
  73782. vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) );
  73783. vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) );
  73784. vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) );
  73785. vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) );
  73786. vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
  73787. vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) );
  73788. vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) );
  73789. vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) );
  73790. vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) );
  73791. vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) );
  73792. vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) );
  73793. vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) );
  73794. if ( !vkCmdDispatchBase ) vkCmdDispatchBase = vkCmdDispatchBaseKHR;
  73795. vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) );
  73796. vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) );
  73797. vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) );
  73798. vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) );
      vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) );
      vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) );
      vkCmdDrawIndexedIndirectCount    = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) );
      if ( !vkCmdDrawIndexedIndirectCount )
        vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
      if ( !vkCmdDrawIndexedIndirectCount )
        vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
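      // Where a command was promoted from more than one extension, the KHR alias is
      // tried first, so it takes precedence over the vendor (AMD) alias when both exist.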
  73804. vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) );
  73805. vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) );
  73806. vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) );
  73807. vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) );
  73808. vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) );
  73809. if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
  73810. if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
  73811. vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) );
  73812. vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) );
  73813. vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) );
  73814. vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) );
  73815. vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) );
  73816. vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) );
  73817. vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) );
  73818. vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) );
  73819. vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) );
  73820. vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) );
  73821. if ( !vkCmdEndRenderPass2 ) vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR;
  73822. vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) );
  73823. vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) );
  73824. vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) );
  73825. vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) );
  73826. vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) );
  73827. vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) );
  73828. vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) );
  73829. vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) );
  73830. if ( !vkCmdNextSubpass2 ) vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
  73831. vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) );
  73832. vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) );
  73833. vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) );
  73834. vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) );
  73835. vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
  73836. vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) );
  73837. vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) );
  73838. vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) );
  73839. vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) );
  73840. vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) );
  73841. vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) );
  73842. vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) );
  73843. vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) );
  73844. vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) );
  73845. vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) );
  73846. vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) );
  73847. vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) );
  73848. vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) );
  73849. vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) );
  73850. vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) );
  73851. vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) );
  73852. if ( !vkCmdSetDeviceMask ) vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
  73853. vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) );
  73854. vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) );
  73855. vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) );
  73856. vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) );
  73857. vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) );
  73858. vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) );
  73859. vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) );
  73860. vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) );
  73861. vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) );
  73862. vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) );
  73863. vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
  73864. vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) );
  73865. vkCmdSetRayTracingPipelineStackSizeKHR = PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRayTracingPipelineStackSizeKHR" ) );
  73866. vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) );
  73867. vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) );
  73868. vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) );
  73869. vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) );
  73870. vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) );
  73871. vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) );
  73872. vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) );
  73873. vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) );
  73874. vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) );
  73875. vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) );
  73876. vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) );
  73877. vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) );
  73878. vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) );
  73879. vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) );
  73880. vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) );
  73881. vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) );
  73882. vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) );
  73883. vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
  73884. vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
  73885. vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) );
  73886. vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) );
  73887. vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) );
  73888. vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) );
  73889. vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) );
  73890. vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) );
  73891. vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) );
  73892. vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) );
  73893. vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) );
  73894. vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) );
  73895. vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) );
  73896. vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) );
  73897. vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) );
  73898. vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) );
  73899. vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) );
  73900. vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) );
  73901. vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) );
  73902. if ( !vkCreateDescriptorUpdateTemplate ) vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
  73903. vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) );
  73904. vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) );
  73905. vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) );
  73906. vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) );
  73907. vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) );
  73908. vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) );
  73909. vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) );
  73910. vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) );
  73911. vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) );
  73912. vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) );
  73913. vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) );
  73914. vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) );
  73915. vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) );
  73916. vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) );
  73917. vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) );
  73918. vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) );
  73919. if ( !vkCreateRenderPass2 ) vkCreateRenderPass2 = vkCreateRenderPass2KHR;
  73920. vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) );
  73921. vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) );
  73922. vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) );
  73923. if ( !vkCreateSamplerYcbcrConversion ) vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
  73924. vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) );
  73925. vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) );
  73926. vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) );
  73927. vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) );
  73928. vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) );
  73929. vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) );
  73930. vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) );
  73931. vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) );
  73932. vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) );
  73933. vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) );
  73934. vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) );
  73935. vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) );
  73936. vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) );
  73937. vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) );
  73938. vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) );
  73939. vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) );
  73940. vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) );
  73941. vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) );
  73942. if ( !vkDestroyDescriptorUpdateTemplate ) vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
  73943. vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) );
  73944. vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) );
  73945. vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) );
  73946. vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) );
  73947. vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) );
  73948. vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) );
  73949. vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) );
  73950. vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) );
  73951. vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) );
  73952. vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) );
  73953. vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) );
  73954. vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) );
  73955. vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) );
  73956. vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) );
  73957. vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) );
  73958. vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) );
  73959. if ( !vkDestroySamplerYcbcrConversion ) vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
  73960. vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) );
  73961. vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) );
  73962. vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) );
  73963. vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) );
  73964. vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) );
  73965. vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) );
  73966. vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) );
  73967. vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) );
  73968. vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) );
  73969. vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) );
  73970. vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) );
  73971. vkGetAccelerationStructureBuildSizesKHR = PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureBuildSizesKHR" ) );
  73972. vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) );
  73973. vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) );
  73974. vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
  73975. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  73976. vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
  73977. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  73978. vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) );
  73979. vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) );
  73980. vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) );
  73981. if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
  73982. if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
  73983. vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) );
  73984. vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) );
  73985. vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) );
  73986. if ( !vkGetBufferMemoryRequirements2 ) vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
  73987. vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) );
  73988. vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) );
  73989. if ( !vkGetBufferOpaqueCaptureAddress ) vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
  73990. vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) );
  73991. vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
  73992. vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) );
  73993. vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) );
  73994. vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) );
  73995. if ( !vkGetDescriptorSetLayoutSupport ) vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
  73996. vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
  73997. vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
  73998. vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) );
  73999. if ( !vkGetDeviceGroupPeerMemoryFeatures ) vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
  74000. vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
  74001. #ifdef VK_USE_PLATFORM_WIN32_KHR
  74002. vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
  74003. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  74004. vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
  74005. vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) );
  74006. vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
  74007. vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
  74008. if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
  74009. vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
  74010. vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) );
  74011. vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) );
  74012. vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) );
  74013. vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) );
  74014. vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) );
  74015. #ifdef VK_USE_PLATFORM_WIN32_KHR
  74016. vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) );
  74017. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  74018. vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
  74019. vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
  74020. vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) );
  74021. vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) );
  74022. vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) );
  74023. if ( !vkGetImageMemoryRequirements2 ) vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
  74024. vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) );
  74025. vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) );
  74026. vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) );
  74027. if ( !vkGetImageSparseMemoryRequirements2 ) vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
  74028. vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) );
  74029. vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) );
  74030. vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) );
  74031. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  74032. vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
  74033. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  74034. vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) );
  74035. vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) );
  74036. vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) );
  74037. #ifdef VK_USE_PLATFORM_WIN32_KHR
  74038. vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) );
  74039. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  74040. #ifdef VK_USE_PLATFORM_WIN32_KHR
  74041. vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) );
  74042. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  74043. #ifdef VK_USE_PLATFORM_WIN32_KHR
  74044. vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) );
  74045. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  74046. vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) );
  74047. vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) );
  74048. vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) );
  74049. vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
  74050. vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) );
  74051. vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) );
  74052. vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) );
  74053. vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) );
  74054. vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) );
  74055. vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
  74056. vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) );
  74057. vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) );
  74058. if ( !vkGetRayTracingShaderGroupHandlesKHR ) vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
  74059. vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupStackSizeKHR" ) );
  74060. vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) );
  74061. vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) );
  74062. vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) );
  74063. vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) );
  74064. if ( !vkGetSemaphoreCounterValue ) vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
  74065. vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) );
  74066. #ifdef VK_USE_PLATFORM_WIN32_KHR
  74067. vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) );
  74068. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  74069. vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) );
  74070. vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) );
  74071. vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) );
  74072. vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) );
  74073. vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) );
  74074. vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) );
  74075. #ifdef VK_USE_PLATFORM_WIN32_KHR
  74076. vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) );
  74077. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  74078. vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) );
  74079. #ifdef VK_USE_PLATFORM_WIN32_KHR
  74080. vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) );
  74081. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  74082. vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) );
  74083. vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) );
  74084. vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) );
  74085. vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) );
  74086. vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) );
  74087. vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) );
  74088. vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) );
  74089. vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) );
  74090. vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) );
  74091. vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) );
  74092. vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) );
  74093. vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) );
  74094. vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) );
  74095. vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) );
  74096. vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) );
  74097. #ifdef VK_USE_PLATFORM_WIN32_KHR
  74098. vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) );
  74099. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  74100. vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) );
  74101. vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) );
  74102. vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) );
  74103. vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) );
  74104. vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) );
  74105. vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) );
  74106. vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) );
  74107. vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) );
  74108. vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) );
  74109. if ( !vkResetQueryPool ) vkResetQueryPool = vkResetQueryPoolEXT;
  74110. vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) );
  74111. vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) );
  74112. vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) );
  74113. vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) );
  74114. vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) );
  74115. vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) );
  74116. vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) );
  74117. vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) );
  74118. if ( !vkSignalSemaphore ) vkSignalSemaphore = vkSignalSemaphoreKHR;
  74119. vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) );
  74120. vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) );
  74121. if ( !vkTrimCommandPool ) vkTrimCommandPool = vkTrimCommandPoolKHR;
  74122. vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) );
  74123. vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) );
  74124. vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) );
  74125. vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) );
  74126. if ( !vkUpdateDescriptorSetWithTemplate ) vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR;
  74127. vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) );
  74128. vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) );
  74129. vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) );
  74130. vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) );
  74131. if ( !vkWaitSemaphores ) vkWaitSemaphores = vkWaitSemaphoresKHR;
  74132. vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) );
  74133. }
  74134. void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT
  74135. {
  74136. VkDevice device = static_cast<VkDevice>(deviceCpp);
  74137. #ifdef VK_USE_PLATFORM_WIN32_KHR
  74138. vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) );
  74139. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  74140. vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) );
  74141. vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) );
  74142. vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) );
  74143. vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) );
  74144. vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) );
  74145. vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) );
  74146. vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) );
  74147. vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) );
  74148. vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) );
  74149. vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) );
  74150. vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) );
  74151. vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) );
  74152. if ( !vkBindBufferMemory2 ) vkBindBufferMemory2 = vkBindBufferMemory2KHR;
  74153. vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) );
  74154. vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) );
  74155. vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) );
  74156. if ( !vkBindImageMemory2 ) vkBindImageMemory2 = vkBindImageMemory2KHR;
  74157. vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) );
  74158. vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) );
  74159. vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) );
  74160. vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) );
  74161. vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) );
  74162. vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) );
  74163. vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) );
  74164. vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) );
  74165. if ( !vkCmdBeginRenderPass2 ) vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR;
  74166. vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) );
  74167. vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) );
  74168. vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) );
  74169. vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) );
  74170. vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) );
  74171. vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) );
  74172. vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) );
  74173. vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) );
  74174. vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) );
  74175. vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) );
  74176. vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) );
  74177. vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) );
  74178. vkCmdBuildAccelerationStructuresIndirectKHR = PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) );
  74179. vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) );
  74180. vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) );
  74181. vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) );
  74182. vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) );
  74183. vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) );
  74184. vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) );
  74185. vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
  74186. vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) );
  74187. vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) );
  74188. vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) );
  74189. vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) );
  74190. vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) );
  74191. vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) );
  74192. vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) );
  74193. vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) );
  74194. vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
  74195. vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) );
  74196. vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) );
  74197. vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) );
  74198. vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) );
  74199. vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) );
  74200. vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) );
  74201. vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) );
  74202. if ( !vkCmdDispatchBase ) vkCmdDispatchBase = vkCmdDispatchBaseKHR;
  74203. vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) );
  74204. vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) );
  74205. vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) );
  74206. vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) );
  74207. vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) );
  74208. vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) );
  74209. vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) );
  74210. if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
  74211. if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
  74212. vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) );
  74213. vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) );
  74214. vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) );
  74215. vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) );
  74216. vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) );
  74217. if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
  74218. if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
  74219. vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) );
  74220. vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) );
  74221. vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) );
  74222. vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) );
  74223. vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) );
  74224. vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) );
  74225. vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) );
  74226. vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) );
  74227. vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) );
  74228. vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) );
  74229. if ( !vkCmdEndRenderPass2 ) vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR;
  74230. vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) );
  74231. vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) );
  74232. vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) );
  74233. vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) );
  74234. vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) );
  74235. vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) );
  74236. vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) );
  74237. vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) );
  74238. if ( !vkCmdNextSubpass2 ) vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
  74239. vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) );
  74240. vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) );
  74241. vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) );
  74242. vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) );
  74243. vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
  74244. vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) );
  74245. vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) );
  74246. vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) );
  74247. vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) );
  74248. vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) );
  74249. vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) );
  74250. vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) );
  74251. vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) );
  74252. vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) );
  74253. vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) );
  74254. vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) );
  74255. vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) );
  74256. vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) );
  74257. vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) );
  74258. vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) );
  74259. vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) );
  74260. if ( !vkCmdSetDeviceMask ) vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
  74261. vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) );
  74262. vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) );
  74263. vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) );
  74264. vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) );
  74265. vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) );
  74266. vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) );
  74267. vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) );
  74268. vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) );
  74269. vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) );
  74270. vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) );
  74271. vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
  74272. vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) );
  74273. vkCmdSetRayTracingPipelineStackSizeKHR = PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) );
  74274. vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) );
  74275. vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) );
  74276. vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) );
  74277. vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) );
  74278. vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) );
  74279. vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) );
  74280. vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) );
  74281. vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) );
  74282. vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) );
  74283. vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) );
  74284. vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) );
  74285. vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) );
  74286. vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) );
  74287. vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) );
  74288. vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) );
  74289. vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) );
  74290. vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) );
  74291. vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
  74292. vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
  74293. vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) );
  74294. vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) );
  74295. vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) );
  74296. vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) );
  74297. vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) );
  74298. vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) );
  74299. vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) );
  74300. vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) );
  74301. vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) );
  74302. vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) );
  74303. vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) );
  74304. vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) );
  74305. vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) );
  74306. vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) );
  74307. vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) );
  74308. vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) );
  74309. vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) );
  74310. if ( !vkCreateDescriptorUpdateTemplate ) vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
  74311. vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) );
  74312. vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) );
  74313. vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) );
  74314. vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) );
  74315. vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) );
  74316. vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) );
  74317. vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) );
  74318. vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) );
  74319. vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) );
  74320. vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) );
  74321. vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) );
  74322. vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) );
  74323. vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) );
  74324. vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) );
  74325. vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) );
  74326. vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) );
  74327. if ( !vkCreateRenderPass2 ) vkCreateRenderPass2 = vkCreateRenderPass2KHR;
  74328. vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) );
  74329. vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) );
  74330. vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
  74331. if ( !vkCreateSamplerYcbcrConversion ) vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
  74332. vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) );
  74333. vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) );
  74334. vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) );
  74335. vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) );
  74336. vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) );
  74337. vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) );
  74338. vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) );
  74339. vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) );
  74340. vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) );
  74341. vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) );
  74342. vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) );
  74343. vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) );
  74344. vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) );
  74345. vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) );
  74346. vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) );
  74347. vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) );
  74348. vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) );
  74349. vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) );
  74350. if ( !vkDestroyDescriptorUpdateTemplate ) vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
  74351. vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) );
  74352. vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) );
  74353. vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) );
  74354. vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) );
  74355. vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) );
  74356. vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) );
  74357. vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) );
  74358. vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) );
  74359. vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) );
  74360. vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) );
  74361. vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) );
  74362. vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) );
  74363. vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) );
  74364. vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) );
  74365. vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) );
  74366. vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) );
  74367. if ( !vkDestroySamplerYcbcrConversion ) vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
  74368. vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) );
  74369. vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) );
  74370. vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) );
  74371. vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) );
  74372. vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) );
  74373. vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) );
  74374. vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) );
  74375. vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) );
  74376. vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) );
  74377. vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) );
  74378. vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) );
  74379. vkGetAccelerationStructureBuildSizesKHR = PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) );
  74380. vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) );
  74381. vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) );
  74382. vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
  74383. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  74384. vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
  74385. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  74386. vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) );
  74387. vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) );
  74388. vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) );
  74389. if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
  74390. if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
  74391. vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
  74392. vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) );
  74393. vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
  74394. if ( !vkGetBufferMemoryRequirements2 ) vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
  74395. vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) );
  74396. vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) );
  74397. if ( !vkGetBufferOpaqueCaptureAddress ) vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
  74398. vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) );
  74399. vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
  74400. vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) );
  74401. vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) );
  74402. vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) );
  74403. if ( !vkGetDescriptorSetLayoutSupport ) vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
  74404. vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
  74405. vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
  74406. vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) );
  74407. if ( !vkGetDeviceGroupPeerMemoryFeatures ) vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
  74408. vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
  74409. #ifdef VK_USE_PLATFORM_WIN32_KHR
  74410. vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
  74411. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  74412. vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
  74413. vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
  74414. vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
  74415. vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
  74416. if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
  74417. vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) );
  74418. vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
  74419. vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) );
  74420. vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) );
  74421. vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) );
  74422. vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) );
  74423. #ifdef VK_USE_PLATFORM_WIN32_KHR
  74424. vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) );
  74425. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  74426. vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
  74427. vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
  74428. vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) );
  74429. vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) );
  74430. vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) );
  74431. if ( !vkGetImageMemoryRequirements2 ) vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
  74432. vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) );
  74433. vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) );
  74434. vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) );
  74435. if ( !vkGetImageSparseMemoryRequirements2 ) vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
  74436. vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) );
  74437. vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) );
  74438. vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) );
  74439. #ifdef VK_USE_PLATFORM_ANDROID_KHR
  74440. vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
  74441. #endif /*VK_USE_PLATFORM_ANDROID_KHR*/
  74442. vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) );
  74443. vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) );
  74444. vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) );
  74445. #ifdef VK_USE_PLATFORM_WIN32_KHR
  74446. vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) );
  74447. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  74448. #ifdef VK_USE_PLATFORM_WIN32_KHR
  74449. vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) );
  74450. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  74451. #ifdef VK_USE_PLATFORM_WIN32_KHR
  74452. vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) );
  74453. #endif /*VK_USE_PLATFORM_WIN32_KHR*/
  74454. vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) );
  74455. vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) );
  74456. vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) );
  74457. vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
  74458. vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) );
  74459. vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) );
  74460. vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) );
  74461. vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) );
  74462. vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
  74463. vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
  74464. vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) );
  74465. vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) );
  74466. if ( !vkGetRayTracingShaderGroupHandlesKHR ) vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
  74467. vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) );
  74468. vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) );
  74469. vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) );
  74470. vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) );
  74471. vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) );
  74472. if ( !vkGetSemaphoreCounterValue ) vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
  74473. vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) );
#ifdef VK_USE_PLATFORM_WIN32_KHR
      vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) );
      vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) );
      vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) );
      vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) );
      vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) );
      vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) );
#ifdef VK_USE_PLATFORM_WIN32_KHR
      vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) );
#ifdef VK_USE_PLATFORM_WIN32_KHR
      vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) );
      vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) );
      vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) );
      vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) );
      vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) );
      vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
      vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
      vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) );
      vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) );
      vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) );
      vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
      vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
      vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
      vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) );
      vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) );
#ifdef VK_USE_PLATFORM_WIN32_KHR
      vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) );
#endif /*VK_USE_PLATFORM_WIN32_KHR*/
      vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) );
      vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) );
      vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) );
      vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) );
      vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) );
      vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
      vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
      vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) );
      vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) );
      if ( !vkResetQueryPool ) vkResetQueryPool = vkResetQueryPoolEXT;
      vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) );
      vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) );
      vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) );
      vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) );
      vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) );
      vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) );
      vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) );
      vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) );
      if ( !vkSignalSemaphore ) vkSignalSemaphore = vkSignalSemaphoreKHR;
      vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) );
      vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) );
      if ( !vkTrimCommandPool ) vkTrimCommandPool = vkTrimCommandPoolKHR;
      vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) );
      vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) );
      vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) );
      vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) );
      if ( !vkUpdateDescriptorSetWithTemplate ) vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR;
      vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) );
      vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
      vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) );
      vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
      if ( !vkWaitSemaphores ) vkWaitSemaphores = vkWaitSemaphoresKHR;
      vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) );
    }
  };
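  // Illustrative use only (the exact constructors and init(...) overloads vary across header
  // versions): a dispatcher is typically initialized from an instance first and then from a
  // device, so that device-level commands are resolved through vkGetDeviceProcAddr as above
  // and bypass per-call dispatch in the loader, e.g.
  //   vk::DispatchLoaderDynamic dispatcher( instance, vkGetInstanceProcAddr );
  //   dispatcher.init( device );
  //   device.waitIdle( dispatcher );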
} // namespace VULKAN_HPP_NAMESPACE
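// The std::hash specializations below forward to std::hash of the underlying C handle, so the
// C++ handle wrappers can be used directly as keys in unordered containers. Illustrative use
// (the variable names are placeholders):
//   std::unordered_map<vk::Buffer, vk::DeviceMemory> boundMemory;
//   boundMemory[buffer] = memory;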
namespace std
{
  template <> struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureKHR const& accelerationStructureKHR) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkAccelerationStructureKHR>{}(static_cast<VkAccelerationStructureKHR>(accelerationStructureKHR));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::AccelerationStructureNV const& accelerationStructureNV) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkAccelerationStructureNV>{}(static_cast<VkAccelerationStructureNV>(accelerationStructureNV));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::Buffer>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::Buffer const& buffer) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkBuffer>{}(static_cast<VkBuffer>(buffer));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::BufferView>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::BufferView const& bufferView) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkBufferView>{}(static_cast<VkBufferView>(bufferView));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::CommandBuffer>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::CommandBuffer const& commandBuffer) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkCommandBuffer>{}(static_cast<VkCommandBuffer>(commandBuffer));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::CommandPool>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::CommandPool const& commandPool) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkCommandPool>{}(static_cast<VkCommandPool>(commandPool));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT const& debugReportCallbackEXT) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDebugReportCallbackEXT>{}(static_cast<VkDebugReportCallbackEXT>(debugReportCallbackEXT));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT const& debugUtilsMessengerEXT) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDebugUtilsMessengerEXT>{}(static_cast<VkDebugUtilsMessengerEXT>(debugUtilsMessengerEXT));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeferredOperationKHR const& deferredOperationKHR) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDeferredOperationKHR>{}(static_cast<VkDeferredOperationKHR>(deferredOperationKHR));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorPool>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorPool const& descriptorPool) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDescriptorPool>{}(static_cast<VkDescriptorPool>(descriptorPool));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorSet>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorSet const& descriptorSet) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDescriptorSet>{}(static_cast<VkDescriptorSet>(descriptorSet));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorSetLayout const& descriptorSetLayout) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDescriptorSetLayout>{}(static_cast<VkDescriptorSetLayout>(descriptorSetLayout));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate const& descriptorUpdateTemplate) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDescriptorUpdateTemplate>{}(static_cast<VkDescriptorUpdateTemplate>(descriptorUpdateTemplate));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::Device>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::Device const& device) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDevice>{}(static_cast<VkDevice>(device));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::DeviceMemory>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::DeviceMemory const& deviceMemory) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDeviceMemory>{}(static_cast<VkDeviceMemory>(deviceMemory));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayKHR>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayKHR const& displayKHR) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDisplayKHR>{}(static_cast<VkDisplayKHR>(displayKHR));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::DisplayModeKHR>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::DisplayModeKHR const& displayModeKHR) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkDisplayModeKHR>{}(static_cast<VkDisplayModeKHR>(displayModeKHR));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::Event>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::Event const& event) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkEvent>{}(static_cast<VkEvent>(event));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::Fence>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::Fence const& fence) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkFence>{}(static_cast<VkFence>(fence));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::Framebuffer>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::Framebuffer const& framebuffer) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkFramebuffer>{}(static_cast<VkFramebuffer>(framebuffer));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::Image>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::Image const& image) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkImage>{}(static_cast<VkImage>(image));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::ImageView>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::ImageView const& imageView) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkImageView>{}(static_cast<VkImageView>(imageView));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV const& indirectCommandsLayoutNV) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkIndirectCommandsLayoutNV>{}(static_cast<VkIndirectCommandsLayoutNV>(indirectCommandsLayoutNV));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::Instance>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::Instance const& instance) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkInstance>{}(static_cast<VkInstance>(instance));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL const& performanceConfigurationINTEL) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkPerformanceConfigurationINTEL>{}(static_cast<VkPerformanceConfigurationINTEL>(performanceConfigurationINTEL));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::PhysicalDevice>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::PhysicalDevice const& physicalDevice) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkPhysicalDevice>{}(static_cast<VkPhysicalDevice>(physicalDevice));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::Pipeline>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::Pipeline const& pipeline) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkPipeline>{}(static_cast<VkPipeline>(pipeline));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineCache>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineCache const& pipelineCache) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkPipelineCache>{}(static_cast<VkPipelineCache>(pipelineCache));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::PipelineLayout>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::PipelineLayout const& pipelineLayout) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkPipelineLayout>{}(static_cast<VkPipelineLayout>(pipelineLayout));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::PrivateDataSlotEXT const& privateDataSlotEXT) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkPrivateDataSlotEXT>{}(static_cast<VkPrivateDataSlotEXT>(privateDataSlotEXT));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::QueryPool>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::QueryPool const& queryPool) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkQueryPool>{}(static_cast<VkQueryPool>(queryPool));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::Queue>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::Queue const& queue) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkQueue>{}(static_cast<VkQueue>(queue));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::RenderPass>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::RenderPass const& renderPass) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkRenderPass>{}(static_cast<VkRenderPass>(renderPass));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::Sampler>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::Sampler const& sampler) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkSampler>{}(static_cast<VkSampler>(sampler));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion const& samplerYcbcrConversion) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkSamplerYcbcrConversion>{}(static_cast<VkSamplerYcbcrConversion>(samplerYcbcrConversion));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::Semaphore>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::Semaphore const& semaphore) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkSemaphore>{}(static_cast<VkSemaphore>(semaphore));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::ShaderModule>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::ShaderModule const& shaderModule) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkShaderModule>{}(static_cast<VkShaderModule>(shaderModule));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::SurfaceKHR>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::SurfaceKHR const& surfaceKHR) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkSurfaceKHR>{}(static_cast<VkSurfaceKHR>(surfaceKHR));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::SwapchainKHR>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::SwapchainKHR const& swapchainKHR) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkSwapchainKHR>{}(static_cast<VkSwapchainKHR>(swapchainKHR));
    }
  };
  template <> struct hash<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>
  {
    std::size_t operator()(VULKAN_HPP_NAMESPACE::ValidationCacheEXT const& validationCacheEXT) const VULKAN_HPP_NOEXCEPT
    {
      return std::hash<VkValidationCacheEXT>{}(static_cast<VkValidationCacheEXT>(validationCacheEXT));
    }
  };
}
#endif