//
// Copyright (C) 2014-2015 LunarG, Inc.
// Copyright (C) 2015-2018 Google, Inc.
// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved.
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
//
// Neither the name of 3Dlabs Inc. Ltd. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Helper for making SPIR-V IR. Generally, this is documented in the header
// SpvBuilder.h.
//

#include <cassert>
#include <cstdlib>
#include <unordered_set>
#include <algorithm>

#include "SpvBuilder.h"
#include "spvUtil.h"
#include "hex_float.h"

#ifndef _WIN32
#include <cstdio>
#endif

namespace spv {

Builder::Builder(unsigned int spvVersion, unsigned int magicNumber, SpvBuildLogger* buildLogger) :
    spvVersion(spvVersion),
    sourceLang(SourceLanguage::Unknown),
    sourceVersion(0),
    addressModel(AddressingModel::Logical),
    memoryModel(MemoryModel::GLSL450),
    builderNumber(magicNumber),
    buildPoint(nullptr),
    uniqueId(0),
    entryPointFunction(nullptr),
    generatingOpCodeForSpecConst(false),
    logger(buildLogger)
{
    clearAccessChain();
}

Builder::~Builder()
{
}

Id Builder::import(const char* name)
{
    Instruction* import = new Instruction(getUniqueId(), NoType, Op::OpExtInstImport);
    import->addStringOperand(name);
    module.mapInstruction(import);
    imports.push_back(std::unique_ptr<Instruction>(import));
    return import->getResultId();
}

// For creating new groupedTypes (will return old type if the requested one was already made).
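// All of the make*Type() helpers below share the same caching pattern: scan
// groupedTypes[opcode] for an existing instruction with matching operands and
// return its result id if found; otherwise build a new Instruction, record it
// in groupedTypes, constantsTypesGlobals, and the module, and, when
// emitNonSemanticShaderDebugInfo is enabled, map its result id to a
// corresponding debug type via debugTypeIdLookup.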
Id Builder::makeVoidType()
{
    Instruction* type;
    if (groupedTypes[enumCast(Op::OpTypeVoid)].size() == 0) {
        Id typeId = getUniqueId();
        type = new Instruction(typeId, NoType, Op::OpTypeVoid);
        groupedTypes[enumCast(Op::OpTypeVoid)].push_back(type);
        constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
        module.mapInstruction(type);
        // Core OpTypeVoid used for debug void type
        if (emitNonSemanticShaderDebugInfo)
            debugTypeIdLookup[typeId] = typeId;
    } else
        type = groupedTypes[enumCast(Op::OpTypeVoid)].back();
    return type->getResultId();
}

Id Builder::makeBoolType()
{
    Instruction* type;
    if (groupedTypes[enumCast(Op::OpTypeBool)].size() == 0) {
        type = new Instruction(getUniqueId(), NoType, Op::OpTypeBool);
        groupedTypes[enumCast(Op::OpTypeBool)].push_back(type);
        constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
        module.mapInstruction(type);
        if (emitNonSemanticShaderDebugInfo) {
            auto const debugResultId = makeBoolDebugType(32);
            debugTypeIdLookup[type->getResultId()] = debugResultId;
        }
    } else
        type = groupedTypes[enumCast(Op::OpTypeBool)].back();
    return type->getResultId();
}

Id Builder::makeSamplerType(const char* debugName)
{
    Instruction* type;
    if (groupedTypes[enumCast(Op::OpTypeSampler)].size() == 0) {
        type = new Instruction(getUniqueId(), NoType, Op::OpTypeSampler);
        groupedTypes[enumCast(Op::OpTypeSampler)].push_back(type);
        constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
        module.mapInstruction(type);
    } else
        type = groupedTypes[enumCast(Op::OpTypeSampler)].back();
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeOpaqueDebugType(debugName);
        debugTypeIdLookup[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

Id Builder::makePointer(StorageClass storageClass, Id pointee)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypePointer)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypePointer)][t];
        if (type->getImmediateOperand(0) == (unsigned)storageClass &&
            type->getIdOperand(1) == pointee)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypePointer);
    type->reserveOperands(2);
    type->addImmediateOperand(storageClass);
    type->addIdOperand(pointee);
    groupedTypes[enumCast(Op::OpTypePointer)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    if (emitNonSemanticShaderDebugInfo) {
        const Id debugResultId = makePointerDebugType(storageClass, pointee);
        debugTypeIdLookup[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

Id Builder::makeForwardPointer(StorageClass storageClass)
{
    // Caching/uniquifying doesn't work here, because we don't know the
    // pointee type and there can be multiple forward pointers of the same
    // storage type. Somebody higher up in the stack must keep track.
    Instruction* type = new Instruction(getUniqueId(), NoType, Op::OpTypeForwardPointer);
    type->addImmediateOperand(storageClass);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    if (emitNonSemanticShaderDebugInfo) {
        const Id debugResultId = makeForwardPointerDebugType(storageClass);
        debugTypeIdLookup[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

Id Builder::makePointerFromForwardPointer(StorageClass storageClass, Id forwardPointerType, Id pointee)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypePointer)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypePointer)][t];
        if (type->getImmediateOperand(0) == (unsigned)storageClass &&
            type->getIdOperand(1) == pointee)
            return type->getResultId();
    }
    type = new Instruction(forwardPointerType, NoType, Op::OpTypePointer);
    type->reserveOperands(2);
    type->addImmediateOperand(storageClass);
    type->addIdOperand(pointee);
    groupedTypes[enumCast(Op::OpTypePointer)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    // If we are emitting nonsemantic debuginfo, we need to patch the debug pointer type
    // that was emitted alongside the forward pointer, now that we have a pointee debug
    // type for it to point to.
    if (emitNonSemanticShaderDebugInfo) {
        Instruction *debugForwardPointer = module.getInstruction(getDebugType(forwardPointerType));
        assert(getDebugType(pointee));
        debugForwardPointer->setIdOperand(2, getDebugType(pointee));
    }
    return type->getResultId();
}

Id Builder::makeIntegerType(int width, bool hasSign)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeInt)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeInt)][t];
        if (type->getImmediateOperand(0) == (unsigned)width &&
            type->getImmediateOperand(1) == (hasSign ? 1u : 0u))
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeInt);
    type->reserveOperands(2);
    type->addImmediateOperand(width);
    type->addImmediateOperand(hasSign ? 1 : 0);
    groupedTypes[enumCast(Op::OpTypeInt)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    // deal with capabilities
    switch (width) {
    case 8:
    case 16:
        // these are currently handled by storage-type declarations and post processing
        break;
    case 64:
        addCapability(Capability::Int64);
        break;
    default:
        break;
    }
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeIntegerDebugType(width, hasSign);
        debugTypeIdLookup[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

Id Builder::makeFloatType(int width)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeFloat)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeFloat)][t];
        if (type->getNumOperands() != 1) {
            continue;
        }
        if (type->getImmediateOperand(0) == (unsigned)width)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeFloat);
    type->addImmediateOperand(width);
    groupedTypes[enumCast(Op::OpTypeFloat)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    // deal with capabilities
    switch (width) {
    case 16:
        // currently handled by storage-type declarations and post processing
        break;
    case 64:
        addCapability(Capability::Float64);
        break;
    default:
        break;
    }
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeFloatDebugType(width);
        debugTypeIdLookup[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

Id Builder::makeBFloat16Type()
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeFloat)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeFloat)][t];
        if (type->getNumOperands() != 2) {
            continue;
        }
        if (type->getImmediateOperand(0) == (unsigned)16 &&
            type->getImmediateOperand(1) == FPEncoding::BFloat16KHR)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeFloat);
    type->addImmediateOperand(16);
    type->addImmediateOperand(FPEncoding::BFloat16KHR);
    groupedTypes[enumCast(Op::OpTypeFloat)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    addExtension(spv::E_SPV_KHR_bfloat16);
    addCapability(Capability::BFloat16TypeKHR);
#if 0
    // XXX not supported
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeFloatDebugType(width);
        debugTypeIdLookup[type->getResultId()] = debugResultId;
    }
#endif
    return type->getResultId();
}

Id Builder::makeFloatE5M2Type()
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeFloat)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeFloat)][t];
        if (type->getNumOperands() != 2) {
            continue;
        }
        if (type->getImmediateOperand(0) == (unsigned)8 &&
            type->getImmediateOperand(1) == FPEncoding::Float8E5M2EXT)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeFloat);
    type->addImmediateOperand(8);
    type->addImmediateOperand(FPEncoding::Float8E5M2EXT);
    groupedTypes[enumCast(Op::OpTypeFloat)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    addExtension(spv::E_SPV_EXT_float8);
    addCapability(Capability::Float8EXT);
#if 0
    // XXX not supported
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeFloatDebugType(width);
        debugTypeIdLookup[type->getResultId()] = debugResultId;
    }
#endif
    return type->getResultId();
}

Id Builder::makeFloatE4M3Type()
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeFloat)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeFloat)][t];
        if (type->getNumOperands() != 2) {
            continue;
        }
        if (type->getImmediateOperand(0) == (unsigned)8 &&
            type->getImmediateOperand(1) == FPEncoding::Float8E4M3EXT)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeFloat);
    type->addImmediateOperand(8);
    type->addImmediateOperand(FPEncoding::Float8E4M3EXT);
    groupedTypes[enumCast(Op::OpTypeFloat)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    addExtension(spv::E_SPV_EXT_float8);
    addCapability(Capability::Float8EXT);
#if 0
    // XXX not supported
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeFloatDebugType(width);
        debugTypeIdLookup[type->getResultId()] = debugResultId;
    }
#endif
    return type->getResultId();
}

// Make a struct without checking for duplication.
// See makeStructResultType() for non-decorated structs
// needed as the result of some instructions, which does
// check for duplicates.
// For compiler-generated structs, debug info is ignored.
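// Illustrative call (hypothetical ids and names), to show how the parameters
// pair up: 'members' holds the SPIR-V type ids of the fields, and, when debug
// info is emitted, 'memberDebugInfo' must supply one entry per member:
//   std::vector<Id> members = { makeFloatType(32), makeIntegerType(32, true) };
//   Id lightId = makeStructType(members, memberDebugInfo, "Light", false);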
Id Builder::makeStructType(const std::vector<Id>& members, const std::vector<spv::StructMemberDebugInfo>& memberDebugInfo,
                           const char* name, bool const compilerGenerated)
{
    // Don't look for previous one, because in the general case,
    // structs can be duplicated except for decorations.
    // not found, make it
    Instruction* type = new Instruction(getUniqueId(), NoType, Op::OpTypeStruct);
    for (int op = 0; op < (int)members.size(); ++op)
        type->addIdOperand(members[op]);
    groupedTypes[enumCast(Op::OpTypeStruct)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    addName(type->getResultId(), name);
    if (emitNonSemanticShaderDebugInfo && !compilerGenerated) {
        assert(members.size() == memberDebugInfo.size());
        auto const debugResultId =
            makeCompositeDebugType(members, memberDebugInfo, name, NonSemanticShaderDebugInfo100Structure);
        debugTypeIdLookup[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

// Make a struct for the simple results of several instructions,
// checking for duplication.
Id Builder::makeStructResultType(Id type0, Id type1)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeStruct)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeStruct)][t];
        if (type->getNumOperands() != 2)
            continue;
        if (type->getIdOperand(0) != type0 ||
            type->getIdOperand(1) != type1)
            continue;
        return type->getResultId();
    }
    // not found, make it
    std::vector<spv::Id> members;
    members.push_back(type0);
    members.push_back(type1);
    return makeStructType(members, {}, "ResType");
}

Id Builder::makeVectorType(Id component, int size)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeVector)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeVector)][t];
        if (type->getIdOperand(0) == component &&
            type->getImmediateOperand(1) == (unsigned)size)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeVector);
    type->reserveOperands(2);
    type->addIdOperand(component);
    type->addImmediateOperand(size);
    groupedTypes[enumCast(Op::OpTypeVector)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeVectorDebugType(component, size);
        debugTypeIdLookup[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

Id Builder::makeMatrixType(Id component, int cols, int rows)
{
    assert(cols <= maxMatrixSize && rows <= maxMatrixSize);
    Id column = makeVectorType(component, rows);
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeMatrix)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeMatrix)][t];
        if (type->getIdOperand(0) == column &&
            type->getImmediateOperand(1) == (unsigned)cols)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeMatrix);
    type->reserveOperands(2);
    type->addIdOperand(column);
    type->addImmediateOperand(cols);
    groupedTypes[enumCast(Op::OpTypeMatrix)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeMatrixDebugType(column, cols);
        debugTypeIdLookup[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

Id Builder::makeCooperativeMatrixTypeKHR(Id component, Id scope, Id rows, Id cols, Id use)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeCooperativeMatrixKHR)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeCooperativeMatrixKHR)][t];
        if (type->getIdOperand(0) == component &&
            type->getIdOperand(1) == scope &&
            type->getIdOperand(2) == rows &&
            type->getIdOperand(3) == cols &&
            type->getIdOperand(4) == use)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeCooperativeMatrixKHR);
    type->reserveOperands(5);
    type->addIdOperand(component);
    type->addIdOperand(scope);
    type->addIdOperand(rows);
    type->addIdOperand(cols);
    type->addIdOperand(use);
    groupedTypes[enumCast(Op::OpTypeCooperativeMatrixKHR)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    if (emitNonSemanticShaderDebugInfo)
    {
        // Find a name for one of the parameters. It can either come from debuginfo for another
        // type, or an OpName from a constant.
        auto const findName = [&](Id id) {
            Id id2 = getDebugType(id);
            for (auto &t : groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic]) {
                if (t->getResultId() == id2) {
                    for (auto &s : strings) {
                        if (s->getResultId() == t->getIdOperand(2)) {
                            return s->getNameString();
                        }
                    }
                }
            }
            for (auto &t : names) {
                if (t->getIdOperand(0) == id) {
                    return t->getNameString();
                }
            }
            return "unknown";
        };
        std::string debugName = "coopmat<";
        debugName += std::string(findName(component)) + ", ";
        if (isConstantScalar(scope)) {
            debugName += std::string("gl_Scope") + std::string(spv::ScopeToString((spv::Scope)getConstantScalar(scope))) + ", ";
        } else {
            debugName += std::string(findName(scope)) + ", ";
        }
        debugName += std::string(findName(rows)) + ", ";
        debugName += std::string(findName(cols)) + ">";
        // There's no nonsemantic debug info instruction for cooperative matrix types,
        // use opaque composite instead.
        auto const debugResultId = makeOpaqueDebugType(debugName.c_str());
        debugTypeIdLookup[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

Id Builder::makeCooperativeMatrixTypeNV(Id component, Id scope, Id rows, Id cols)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeCooperativeMatrixNV)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeCooperativeMatrixNV)][t];
        if (type->getIdOperand(0) == component && type->getIdOperand(1) == scope && type->getIdOperand(2) == rows &&
            type->getIdOperand(3) == cols)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeCooperativeMatrixNV);
    type->reserveOperands(4);
    type->addIdOperand(component);
    type->addIdOperand(scope);
    type->addIdOperand(rows);
    type->addIdOperand(cols);
    groupedTypes[enumCast(Op::OpTypeCooperativeMatrixNV)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    return type->getResultId();
}

Id Builder::makeCooperativeMatrixTypeWithSameShape(Id component, Id otherType)
{
    Instruction* instr = module.getInstruction(otherType);
    if (instr->getOpCode() == Op::OpTypeCooperativeMatrixNV) {
        return makeCooperativeMatrixTypeNV(component, instr->getIdOperand(1), instr->getIdOperand(2), instr->getIdOperand(3));
    } else {
        assert(instr->getOpCode() == Op::OpTypeCooperativeMatrixKHR);
        return makeCooperativeMatrixTypeKHR(component, instr->getIdOperand(1), instr->getIdOperand(2), instr->getIdOperand(3), instr->getIdOperand(4));
    }
}

Id Builder::makeCooperativeVectorTypeNV(Id componentType, Id components)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeCooperativeVectorNV)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeCooperativeVectorNV)][t];
        if (type->getIdOperand(0) == componentType &&
            type->getIdOperand(1) == components)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeCooperativeVectorNV);
    type->addIdOperand(componentType);
    type->addIdOperand(components);
    groupedTypes[enumCast(Op::OpTypeCooperativeVectorNV)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    return type->getResultId();
}

Id Builder::makeTensorTypeARM(Id elementType, Id rank)
{
    // See if an OpTypeTensorARM with same element type and rank already exists.
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeTensorARM)].size(); ++t) {
        const Instruction *type = groupedTypes[enumCast(Op::OpTypeTensorARM)][t];
        if (type->getIdOperand(0) == elementType && type->getIdOperand(1) == rank)
            return type->getResultId();
    }
    // Not found, make it.
    std::unique_ptr<Instruction> type(new Instruction(getUniqueId(), NoType, Op::OpTypeTensorARM));
    type->addIdOperand(elementType);
    type->addIdOperand(rank);
    groupedTypes[enumCast(Op::OpTypeTensorARM)].push_back(type.get());
    module.mapInstruction(type.get());
    Id resultID = type->getResultId();
    constantsTypesGlobals.push_back(std::move(type));
    return resultID;
}

Id Builder::makeGenericType(spv::Op opcode, std::vector<spv::IdImmediate>& operands)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(opcode)].size(); ++t) {
        type = groupedTypes[enumCast(opcode)][t];
        if (static_cast<size_t>(type->getNumOperands()) != operands.size())
            continue; // Number mismatch, find next
        bool match = true;
        for (int op = 0; match && op < (int)operands.size(); ++op) {
            match = (operands[op].isId ? type->getIdOperand(op) : type->getImmediateOperand(op)) == operands[op].word;
        }
        if (match)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, opcode);
    type->reserveOperands(operands.size());
    for (size_t op = 0; op < operands.size(); ++op) {
        if (operands[op].isId)
            type->addIdOperand(operands[op].word);
        else
            type->addImmediateOperand(operands[op].word);
    }
    groupedTypes[enumCast(opcode)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    return type->getResultId();
}

// TODO: performance: track arrays per stride
// If a stride is supplied (non-zero) make an array.
// If no stride (0), reuse previous array types.
// 'size' is an Id of a constant or specialization constant of the array size
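// For example, calling makeArrayType(elementId, sizeId, 0) twice returns the
// same id, as long as the cached OpTypeArray was not created with an explicit
// stride; a non-zero stride always creates a fresh type and records it in
// explicitlyLaidOut so it is never shared with stride-free arrays.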
Id Builder::makeArrayType(Id element, Id sizeId, int stride)
{
    Instruction* type;
    if (stride == 0) {
        // try to find existing type
        for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeArray)].size(); ++t) {
            type = groupedTypes[enumCast(Op::OpTypeArray)][t];
            if (type->getIdOperand(0) == element &&
                type->getIdOperand(1) == sizeId &&
                explicitlyLaidOut.find(type->getResultId()) == explicitlyLaidOut.end())
                return type->getResultId();
        }
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeArray);
    type->reserveOperands(2);
    type->addIdOperand(element);
    type->addIdOperand(sizeId);
    groupedTypes[enumCast(Op::OpTypeArray)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    if (stride != 0) {
        explicitlyLaidOut.insert(type->getResultId());
    }
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeArrayDebugType(element, sizeId);
        debugTypeIdLookup[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

Id Builder::makeRuntimeArray(Id element)
{
    Instruction* type = new Instruction(getUniqueId(), NoType, Op::OpTypeRuntimeArray);
    type->addIdOperand(element);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeArrayDebugType(element, makeUintConstant(0));
        debugTypeIdLookup[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

Id Builder::makeFunctionType(Id returnType, const std::vector<Id>& paramTypes)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeFunction)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeFunction)][t];
        if (type->getIdOperand(0) != returnType || (int)paramTypes.size() != type->getNumOperands() - 1)
            continue;
        bool mismatch = false;
        for (int p = 0; p < (int)paramTypes.size(); ++p) {
            if (paramTypes[p] != type->getIdOperand(p + 1)) {
                mismatch = true;
                break;
            }
        }
        if (! mismatch)
        {
            // If compiling HLSL, glslang will create a wrapper function around the entrypoint. Accordingly, a void(void)
            // function type is created for the wrapper function. However, nonsemantic shader debug information is disabled
            // while creating the HLSL wrapper. Consequently, if we encounter another void(void) function, we need to create
            // the associated debug function type if it hasn't been created yet.
            if (emitNonSemanticShaderDebugInfo && getDebugType(type->getResultId()) == NoType) {
                assert(sourceLang == spv::SourceLanguage::HLSL);
                assert(getTypeClass(returnType) == Op::OpTypeVoid && paramTypes.size() == 0);
                Id id = makeDebugFunctionType(returnType, {});
                debugTypeIdLookup[type->getResultId()] = id;
            }
            return type->getResultId();
        }
    }
    // not found, make it
    Id typeId = getUniqueId();
    type = new Instruction(typeId, NoType, Op::OpTypeFunction);
    type->reserveOperands(paramTypes.size() + 1);
    type->addIdOperand(returnType);
    for (int p = 0; p < (int)paramTypes.size(); ++p)
        type->addIdOperand(paramTypes[p]);
    groupedTypes[enumCast(Op::OpTypeFunction)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    // make debug type and map it
    if (emitNonSemanticShaderDebugInfo) {
        Id debugTypeId = makeDebugFunctionType(returnType, paramTypes);
        debugTypeIdLookup[typeId] = debugTypeId;
    }
    return type->getResultId();
}

Id Builder::makeDebugFunctionType(Id returnType, const std::vector<Id>& paramTypes)
{
    assert(getDebugType(returnType) != NoType);
    Id typeId = getUniqueId();
    auto type = new Instruction(typeId, makeVoidType(), Op::OpExtInst);
    type->reserveOperands(paramTypes.size() + 4);
    type->addIdOperand(nonSemanticShaderDebugInfo);
    type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeFunction);
    type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsPublic));
    type->addIdOperand(getDebugType(returnType));
    for (auto const paramType : paramTypes) {
        if (isPointerType(paramType) || isArrayType(paramType)) {
            type->addIdOperand(getDebugType(getContainedTypeId(paramType)));
        }
        else {
            type->addIdOperand(getDebugType(paramType));
        }
    }
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    return typeId;
}

Id Builder::makeImageType(Id sampledType, Dim dim, bool depth, bool arrayed, bool ms, unsigned sampled,
                          ImageFormat format, const char* debugName)
{
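    // Per the SPIR-V OpTypeImage definition, 'sampled' == 1 means the image is
    // used with a sampler, and 'sampled' == 2 means it is used without one
    // (as a storage image); hence the assert below.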
  741. assert(sampled == 1 || sampled == 2);
  742. // try to find it
  743. Instruction* type;
  744. for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeImage)].size(); ++t) {
  745. type = groupedTypes[enumCast(Op::OpTypeImage)][t];
  746. if (type->getIdOperand(0) == sampledType &&
  747. type->getImmediateOperand(1) == (unsigned int)dim &&
  748. type->getImmediateOperand(2) == ( depth ? 1u : 0u) &&
  749. type->getImmediateOperand(3) == (arrayed ? 1u : 0u) &&
  750. type->getImmediateOperand(4) == ( ms ? 1u : 0u) &&
  751. type->getImmediateOperand(5) == sampled &&
  752. type->getImmediateOperand(6) == (unsigned int)format)
  753. return type->getResultId();
  754. }
  755. // not found, make it
  756. type = new Instruction(getUniqueId(), NoType, Op::OpTypeImage);
  757. type->reserveOperands(7);
  758. type->addIdOperand(sampledType);
  759. type->addImmediateOperand( dim);
  760. type->addImmediateOperand( depth ? 1 : 0);
  761. type->addImmediateOperand(arrayed ? 1 : 0);
  762. type->addImmediateOperand( ms ? 1 : 0);
  763. type->addImmediateOperand(sampled);
  764. type->addImmediateOperand((unsigned int)format);
  765. groupedTypes[enumCast(Op::OpTypeImage)].push_back(type);
  766. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  767. module.mapInstruction(type);
  768. // deal with capabilities
  769. switch (dim) {
  770. case Dim::Buffer:
  771. if (sampled == 1)
  772. addCapability(Capability::SampledBuffer);
  773. else
  774. addCapability(Capability::ImageBuffer);
  775. break;
  776. case Dim::Dim1D:
  777. if (sampled == 1)
  778. addCapability(Capability::Sampled1D);
  779. else
  780. addCapability(Capability::Image1D);
  781. break;
  782. case Dim::Cube:
  783. if (arrayed) {
  784. if (sampled == 1)
  785. addCapability(Capability::SampledCubeArray);
  786. else
  787. addCapability(Capability::ImageCubeArray);
  788. }
  789. break;
  790. case Dim::Rect:
  791. if (sampled == 1)
  792. addCapability(Capability::SampledRect);
  793. else
  794. addCapability(Capability::ImageRect);
  795. break;
  796. case Dim::SubpassData:
  797. addCapability(Capability::InputAttachment);
  798. break;
  799. default:
  800. break;
  801. }
  802. if (ms) {
  803. if (sampled == 2) {
  804. // Images used with subpass data are not storage
  805. // images, so don't require the capability for them.
  806. if (dim != Dim::SubpassData)
  807. addCapability(Capability::StorageImageMultisample);
  808. if (arrayed)
  809. addCapability(Capability::ImageMSArray);
  810. }
  811. }
  812. if (emitNonSemanticShaderDebugInfo)
  813. {
  814. auto const debugResultId = makeOpaqueDebugType(debugName);
  815. debugTypeIdLookup[type->getResultId()] = debugResultId;
  816. }
  817. return type->getResultId();
  818. }
  819. Id Builder::makeSampledImageType(Id imageType, const char* debugName)
  820. {
  821. // try to find it
  822. Instruction* type;
  823. for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeSampledImage)].size(); ++t) {
  824. type = groupedTypes[enumCast(Op::OpTypeSampledImage)][t];
  825. if (type->getIdOperand(0) == imageType)
  826. return type->getResultId();
  827. }
  828. // not found, make it
  829. type = new Instruction(getUniqueId(), NoType, Op::OpTypeSampledImage);
  830. type->addIdOperand(imageType);
  831. groupedTypes[enumCast(Op::OpTypeSampledImage)].push_back(type);
  832. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  833. module.mapInstruction(type);
  834. if (emitNonSemanticShaderDebugInfo)
  835. {
  836. auto const debugResultId = makeOpaqueDebugType(debugName);
  837. debugTypeIdLookup[type->getResultId()] = debugResultId;
  838. }
  839. return type->getResultId();
  840. }
  841. Id Builder::makeDebugInfoNone()
  842. {
  843. if (debugInfoNone != 0)
  844. return debugInfoNone;
  845. Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  846. inst->reserveOperands(2);
  847. inst->addIdOperand(nonSemanticShaderDebugInfo);
  848. inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugInfoNone);
  849. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
  850. module.mapInstruction(inst);
  851. debugInfoNone = inst->getResultId();
  852. return debugInfoNone;
  853. }
  854. Id Builder::makeBoolDebugType(int const size)
  855. {
  856. // try to find it
  857. Instruction* type;
  858. for (int t = 0; t < (int)groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].size(); ++t) {
  859. type = groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic][t];
  860. if (type->getIdOperand(0) == getStringId("bool") &&
  861. type->getIdOperand(1) == static_cast<unsigned int>(size) &&
  862. type->getIdOperand(2) == NonSemanticShaderDebugInfo100Boolean)
  863. return type->getResultId();
  864. }
  865. type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  866. type->reserveOperands(6);
  867. type->addIdOperand(nonSemanticShaderDebugInfo);
  868. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeBasic);
  869. type->addIdOperand(getStringId("bool")); // name id
  870. type->addIdOperand(makeUintConstant(size)); // size id
  871. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100Boolean)); // encoding id
  872. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100None)); // flags id
  873. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].push_back(type);
  874. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  875. module.mapInstruction(type);
  876. return type->getResultId();
  877. }
  878. Id Builder::makeIntegerDebugType(int const width, bool const hasSign)
  879. {
  880. const char* typeName = nullptr;
  881. switch (width) {
  882. case 8: typeName = hasSign ? "int8_t" : "uint8_t"; break;
  883. case 16: typeName = hasSign ? "int16_t" : "uint16_t"; break;
  884. case 64: typeName = hasSign ? "int64_t" : "uint64_t"; break;
  885. default: typeName = hasSign ? "int" : "uint";
  886. }
  887. auto nameId = getStringId(typeName);
  888. // try to find it
  889. Instruction* type;
  890. for (int t = 0; t < (int)groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].size(); ++t) {
  891. type = groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic][t];
  892. if (type->getIdOperand(0) == nameId &&
  893. type->getIdOperand(1) == static_cast<unsigned int>(width) &&
  894. type->getIdOperand(2) == (hasSign ? NonSemanticShaderDebugInfo100Signed : NonSemanticShaderDebugInfo100Unsigned))
  895. return type->getResultId();
  896. }
  897. // not found, make it
  898. type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  899. type->reserveOperands(6);
  900. type->addIdOperand(nonSemanticShaderDebugInfo);
  901. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeBasic);
  902. type->addIdOperand(nameId); // name id
  903. type->addIdOperand(makeUintConstant(width)); // size id
904. if (hasSign) {
  905. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100Signed)); // encoding id
  906. } else {
  907. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100Unsigned)); // encoding id
  908. }
  909. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100None)); // flags id
  910. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].push_back(type);
  911. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  912. module.mapInstruction(type);
  913. return type->getResultId();
  914. }
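// Find or create the DebugTypeBasic instruction for a floating-point type of the given width.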
  915. Id Builder::makeFloatDebugType(int const width)
  916. {
  917. const char* typeName = nullptr;
  918. switch (width) {
  919. case 16: typeName = "float16_t"; break;
  920. case 64: typeName = "double"; break;
  921. default: typeName = "float"; break;
  922. }
  923. auto nameId = getStringId(typeName);
  924. // try to find it
  925. Instruction* type;
  926. for (int t = 0; t < (int)groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].size(); ++t) {
  927. type = groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic][t];
  928. if (type->getIdOperand(0) == nameId &&
  929. type->getIdOperand(1) == static_cast<unsigned int>(width) &&
  930. type->getIdOperand(2) == NonSemanticShaderDebugInfo100Float)
  931. return type->getResultId();
  932. }
  933. // not found, make it
  934. type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  935. type->reserveOperands(6);
  936. type->addIdOperand(nonSemanticShaderDebugInfo);
  937. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeBasic);
  938. type->addIdOperand(nameId); // name id
  939. type->addIdOperand(makeUintConstant(width)); // size id
  940. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100Float)); // encoding id
  941. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100None)); // flags id
  942. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].push_back(type);
  943. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  944. module.mapInstruction(type);
  945. return type->getResultId();
  946. }
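// Find or create a DebugTypeArray or DebugTypeVector instruction over the given base type and component count.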
  947. Id Builder::makeSequentialDebugType(Id const baseType, Id const componentCount, NonSemanticShaderDebugInfo100Instructions const sequenceType)
  948. {
  949. assert(sequenceType == NonSemanticShaderDebugInfo100DebugTypeArray ||
  950. sequenceType == NonSemanticShaderDebugInfo100DebugTypeVector);
  951. // try to find it
  952. Instruction* type;
  953. for (int t = 0; t < (int)groupedDebugTypes[sequenceType].size(); ++t) {
  954. type = groupedDebugTypes[sequenceType][t];
  955. if (type->getIdOperand(0) == baseType &&
  956. type->getIdOperand(1) == makeUintConstant(componentCount))
  957. return type->getResultId();
  958. }
  959. // not found, make it
  960. type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  961. type->reserveOperands(4);
  962. type->addIdOperand(nonSemanticShaderDebugInfo);
  963. type->addImmediateOperand(sequenceType);
  964. type->addIdOperand(getDebugType(baseType)); // base type
  965. type->addIdOperand(componentCount); // component count
  966. groupedDebugTypes[sequenceType].push_back(type);
  967. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  968. module.mapInstruction(type);
  969. return type->getResultId();
  970. }
  971. Id Builder::makeArrayDebugType(Id const baseType, Id const componentCount)
  972. {
  973. return makeSequentialDebugType(baseType, componentCount, NonSemanticShaderDebugInfo100DebugTypeArray);
  974. }
  975. Id Builder::makeVectorDebugType(Id const baseType, int const componentCount)
  976. {
  977. return makeSequentialDebugType(baseType, makeUintConstant(componentCount), NonSemanticShaderDebugInfo100DebugTypeVector);
  978. }
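// Find or create the DebugTypeMatrix instruction for the given vector type, vector count, and majorness.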
  979. Id Builder::makeMatrixDebugType(Id const vectorType, int const vectorCount, bool columnMajor)
  980. {
  981. // try to find it
  982. Instruction* type;
  983. for (int t = 0; t < (int)groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeMatrix].size(); ++t) {
  984. type = groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeMatrix][t];
  985. if (type->getIdOperand(0) == vectorType &&
  986. type->getIdOperand(1) == makeUintConstant(vectorCount))
  987. return type->getResultId();
  988. }
  989. // not found, make it
  990. type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  991. type->reserveOperands(5);
  992. type->addIdOperand(nonSemanticShaderDebugInfo);
  993. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeMatrix);
  994. type->addIdOperand(getDebugType(vectorType)); // vector type id
  995. type->addIdOperand(makeUintConstant(vectorCount)); // component count id
  996. type->addIdOperand(makeBoolConstant(columnMajor)); // column-major id
  997. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeMatrix].push_back(type);
  998. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  999. module.mapInstruction(type);
  1000. return type->getResultId();
  1001. }
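// Create a DebugTypeMember instruction for a single structure member, using the supplied member debug info.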
  1002. Id Builder::makeMemberDebugType(Id const memberType, StructMemberDebugInfo const& debugTypeLoc)
  1003. {
  1004. assert(getDebugType(memberType) != NoType);
  1005. Instruction* type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1006. type->reserveOperands(10);
  1007. type->addIdOperand(nonSemanticShaderDebugInfo);
  1008. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeMember);
  1009. type->addIdOperand(getStringId(debugTypeLoc.name)); // name id
  1010. type->addIdOperand(debugTypeLoc.debugTypeOverride != 0 ? debugTypeLoc.debugTypeOverride
  1011. : getDebugType(memberType)); // type id
  1012. type->addIdOperand(makeDebugSource(currentFileId)); // source id
  1013. type->addIdOperand(makeUintConstant(debugTypeLoc.line)); // line id TODO: currentLine is always zero
  1014. type->addIdOperand(makeUintConstant(debugTypeLoc.column)); // TODO: column id
  1015. type->addIdOperand(makeUintConstant(0)); // TODO: offset id
  1016. type->addIdOperand(makeUintConstant(0)); // TODO: size id
  1017. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsPublic)); // flags id
  1018. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeMember].push_back(type);
  1019. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1020. module.mapInstruction(type);
  1021. return type->getResultId();
  1022. }
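// Create a DebugTypeComposite instruction for a structure, first creating DebugTypeMember instructions
// for each member that has a debug type.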
  1023. Id Builder::makeCompositeDebugType(std::vector<Id> const& memberTypes, std::vector<StructMemberDebugInfo> const& memberDebugInfo,
  1024. char const* const name, NonSemanticShaderDebugInfo100DebugCompositeType const tag)
  1025. {
  1026. // Create the debug member types.
  1027. std::vector<Id> memberDebugTypes;
  1028. assert(memberTypes.size() == memberDebugInfo.size());
  1029. for (size_t i = 0; i < memberTypes.size(); i++) {
  1030. if (getDebugType(memberTypes[i]) != NoType) {
  1031. memberDebugTypes.emplace_back(makeMemberDebugType(memberTypes[i], memberDebugInfo[i]));
  1032. }
  1033. }
1034. // Create the structure debug type.
  1035. Instruction* type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1036. type->reserveOperands(memberDebugTypes.size() + 11);
  1037. type->addIdOperand(nonSemanticShaderDebugInfo);
  1038. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeComposite);
  1039. type->addIdOperand(getStringId(name)); // name id
  1040. type->addIdOperand(makeUintConstant(tag)); // tag id
  1041. type->addIdOperand(makeDebugSource(currentFileId)); // source id
  1042. type->addIdOperand(makeUintConstant(currentLine)); // line id TODO: currentLine always zero?
  1043. type->addIdOperand(makeUintConstant(0)); // TODO: column id
  1044. type->addIdOperand(makeDebugCompilationUnit()); // scope id
  1045. type->addIdOperand(getStringId(name)); // linkage name id
  1046. type->addIdOperand(makeUintConstant(0)); // TODO: size id
  1047. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsPublic)); // flags id
  1048. for(auto const memberDebugType : memberDebugTypes) {
  1049. type->addIdOperand(memberDebugType);
  1050. }
  1051. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeComposite].push_back(type);
  1052. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1053. module.mapInstruction(type);
  1054. return type->getResultId();
  1055. }
1056. // NonSemantic Shader Debug Info doesn't have a dedicated opcode for opaque types, so we use DebugTypeComposite instead.
1057. // To represent a source-language opaque type, the instruction must have no Members operands, its Size operand must be
1058. // DebugInfoNone, and its Name must start with '@' to avoid clashes with user-defined names.
  1059. Id Builder::makeOpaqueDebugType(char const* const name)
  1060. {
1061. // Create the opaque structure debug type.
  1062. Instruction* type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1063. type->reserveOperands(11);
  1064. type->addIdOperand(nonSemanticShaderDebugInfo);
  1065. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeComposite);
  1066. type->addIdOperand(getStringId(name)); // name id
  1067. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100Structure)); // tag id
  1068. type->addIdOperand(makeDebugSource(currentFileId)); // source id
  1069. type->addIdOperand(makeUintConstant(currentLine)); // line id TODO: currentLine always zero?
  1070. type->addIdOperand(makeUintConstant(0)); // TODO: column id
  1071. type->addIdOperand(makeDebugCompilationUnit()); // scope id
  1072. // Prepend '@' to opaque types.
  1073. type->addIdOperand(getStringId('@' + std::string(name))); // linkage name id
  1074. type->addIdOperand(makeDebugInfoNone()); // size id
  1075. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsPublic)); // flags id
  1076. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeComposite].push_back(type);
  1077. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1078. module.mapInstruction(type);
  1079. return type->getResultId();
  1080. }
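// Find or create the DebugTypePointer instruction for the given storage class and base type;
// returns DebugInfoNone when the base type has no debug type.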
  1081. Id Builder::makePointerDebugType(StorageClass storageClass, Id const baseType)
  1082. {
  1083. const Id debugBaseType = getDebugType(baseType);
  1084. if (!debugBaseType) {
  1085. return makeDebugInfoNone();
  1086. }
  1087. const Id scID = makeUintConstant(storageClass);
  1088. for (Instruction* otherType : groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypePointer]) {
  1089. if (otherType->getIdOperand(2) == debugBaseType &&
  1090. otherType->getIdOperand(3) == scID) {
  1091. return otherType->getResultId();
  1092. }
  1093. }
  1094. Instruction* type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1095. type->reserveOperands(5);
  1096. type->addIdOperand(nonSemanticShaderDebugInfo);
  1097. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypePointer);
  1098. type->addIdOperand(debugBaseType);
  1099. type->addIdOperand(scID);
  1100. type->addIdOperand(makeUintConstant(0));
  1101. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypePointer].push_back(type);
  1102. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1103. module.mapInstruction(type);
  1104. return type->getResultId();
  1105. }
1106. // Emit an OpExtInstWithForwardRefsKHR non-semantic instruction for a pointer debug type
1107. // whose pointee is not yet known. Since the pointee is unavailable, the instruction
1108. // points to itself for now, and we rely on patching it later.
  1109. Id Builder::makeForwardPointerDebugType(StorageClass storageClass)
  1110. {
  1111. const Id scID = makeUintConstant(storageClass);
  1112. this->addExtension(spv::E_SPV_KHR_relaxed_extended_instruction);
  1113. Instruction *type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInstWithForwardRefsKHR);
  1114. type->addIdOperand(nonSemanticShaderDebugInfo);
  1115. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypePointer);
  1116. type->addIdOperand(type->getResultId());
  1117. type->addIdOperand(scID);
  1118. type->addIdOperand(makeUintConstant(0));
  1119. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypePointer].push_back(type);
  1120. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1121. module.mapInstruction(type);
  1122. return type->getResultId();
  1123. }
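// Find or create the DebugSource instruction for the given file name id, embedding the source text when
// enabled and available (split across DebugSourceContinued instructions if it exceeds the word-count limit).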
  1124. Id Builder::makeDebugSource(const Id fileName) {
  1125. if (debugSourceId.find(fileName) != debugSourceId.end())
  1126. return debugSourceId[fileName];
  1127. spv::Id resultId = getUniqueId();
  1128. Instruction* sourceInst = new Instruction(resultId, makeVoidType(), Op::OpExtInst);
  1129. sourceInst->reserveOperands(3);
  1130. sourceInst->addIdOperand(nonSemanticShaderDebugInfo);
  1131. sourceInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugSource);
  1132. sourceInst->addIdOperand(fileName);
  1133. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(sourceInst));
  1134. module.mapInstruction(sourceInst);
  1135. if (emitNonSemanticShaderDebugSource) {
  1136. const int maxWordCount = 0xFFFF;
  1137. const int opSourceWordCount = 4;
  1138. const int nonNullBytesPerInstruction = 4 * (maxWordCount - opSourceWordCount) - 1;
  1139. auto processDebugSource = [&](std::string source) {
  1140. if (source.size() > 0) {
  1141. int nextByte = 0;
  1142. while ((int)source.size() - nextByte > 0) {
  1143. auto subString = source.substr(nextByte, nonNullBytesPerInstruction);
  1144. auto sourceId = getStringId(subString);
  1145. if (nextByte == 0) {
  1146. // DebugSource
  1147. sourceInst->addIdOperand(sourceId);
  1148. } else {
  1149. // DebugSourceContinued
  1150. Instruction* sourceContinuedInst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1151. sourceContinuedInst->reserveOperands(2);
  1152. sourceContinuedInst->addIdOperand(nonSemanticShaderDebugInfo);
  1153. sourceContinuedInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugSourceContinued);
  1154. sourceContinuedInst->addIdOperand(sourceId);
  1155. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(sourceContinuedInst));
  1156. module.mapInstruction(sourceContinuedInst);
  1157. }
  1158. nextByte += nonNullBytesPerInstruction;
  1159. }
  1160. } else {
  1161. auto sourceId = getStringId(source);
  1162. sourceInst->addIdOperand(sourceId);
  1163. }
  1164. };
  1165. if (fileName == mainFileId) {
  1166. processDebugSource(sourceText);
  1167. } else {
  1168. auto incItr = includeFiles.find(fileName);
  1169. if (incItr != includeFiles.end()) {
  1170. processDebugSource(*incItr->second);
  1171. } else {
  1172. // We omit the optional source text item if not available in glslang
  1173. }
  1174. }
  1175. }
  1176. debugSourceId[fileName] = resultId;
  1177. return resultId;
  1178. }
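// Return the DebugCompilationUnit for this module, creating it on first use and pushing it as the
// initial entry on the debug-scope stack.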
  1179. Id Builder::makeDebugCompilationUnit() {
  1180. if (nonSemanticShaderCompilationUnitId != 0)
  1181. return nonSemanticShaderCompilationUnitId;
  1182. spv::Id resultId = getUniqueId();
  1183. Instruction* sourceInst = new Instruction(resultId, makeVoidType(), Op::OpExtInst);
  1184. sourceInst->reserveOperands(6);
  1185. sourceInst->addIdOperand(nonSemanticShaderDebugInfo);
  1186. sourceInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugCompilationUnit);
  1187. sourceInst->addIdOperand(makeUintConstant(1)); // TODO(greg-lunarg): Get rid of magic number
  1188. sourceInst->addIdOperand(makeUintConstant(4)); // TODO(greg-lunarg): Get rid of magic number
  1189. sourceInst->addIdOperand(makeDebugSource(mainFileId));
  1190. sourceInst->addIdOperand(makeUintConstant(sourceLang));
  1191. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(sourceInst));
  1192. module.mapInstruction(sourceInst);
  1193. nonSemanticShaderCompilationUnitId = resultId;
1194. // We can reasonably assume that makeDebugCompilationUnit will be called before anything is
1195. // pushed onto the debug-scope stack; function scopes and lexical scopes will come afterward.
  1196. assert(currentDebugScopeId.empty());
  1197. currentDebugScopeId.push(nonSemanticShaderCompilationUnitId);
  1198. return resultId;
  1199. }
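// Create a DebugGlobalVariable instruction describing 'variable' with the given debug type and name.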
  1200. Id Builder::createDebugGlobalVariable(Id const type, char const*const name, Id const variable)
  1201. {
  1202. assert(type != 0);
  1203. Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1204. inst->reserveOperands(11);
  1205. inst->addIdOperand(nonSemanticShaderDebugInfo);
  1206. inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugGlobalVariable);
  1207. inst->addIdOperand(getStringId(name)); // name id
  1208. inst->addIdOperand(type); // type id
  1209. inst->addIdOperand(makeDebugSource(currentFileId)); // source id
  1210. inst->addIdOperand(makeUintConstant(currentLine)); // line id TODO: currentLine always zero?
  1211. inst->addIdOperand(makeUintConstant(0)); // TODO: column id
  1212. inst->addIdOperand(makeDebugCompilationUnit()); // scope id
  1213. inst->addIdOperand(getStringId(name)); // linkage name id
  1214. inst->addIdOperand(variable); // variable id
  1215. inst->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsDefinition)); // flags id
  1216. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
  1217. module.mapInstruction(inst);
  1218. return inst->getResultId();
  1219. }
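// Create a DebugLocalVariable instruction in the current debug scope; a non-zero 'argNumber' is
// recorded as the variable's argument number.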
  1220. Id Builder::createDebugLocalVariable(Id type, char const*const name, size_t const argNumber)
  1221. {
  1222. assert(name != nullptr);
  1223. assert(!currentDebugScopeId.empty());
  1224. Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1225. inst->reserveOperands(9);
  1226. inst->addIdOperand(nonSemanticShaderDebugInfo);
  1227. inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugLocalVariable);
  1228. inst->addIdOperand(getStringId(name)); // name id
  1229. inst->addIdOperand(type); // type id
  1230. inst->addIdOperand(makeDebugSource(currentFileId)); // source id
  1231. inst->addIdOperand(makeUintConstant(currentLine)); // line id
  1232. inst->addIdOperand(makeUintConstant(0)); // TODO: column id
  1233. inst->addIdOperand(currentDebugScopeId.top()); // scope id
  1234. inst->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsLocal)); // flags id
1235. if (argNumber != 0) {
  1236. inst->addIdOperand(makeUintConstant(static_cast<unsigned int>(argNumber)));
  1237. }
  1238. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
  1239. module.mapInstruction(inst);
  1240. return inst->getResultId();
  1241. }
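// Return the (empty) DebugExpression instruction, creating and caching it on first use.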
  1242. Id Builder::makeDebugExpression()
  1243. {
  1244. if (debugExpression != 0)
  1245. return debugExpression;
  1246. Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1247. inst->reserveOperands(2);
  1248. inst->addIdOperand(nonSemanticShaderDebugInfo);
  1249. inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugExpression);
  1250. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
  1251. module.mapInstruction(inst);
  1252. debugExpression = inst->getResultId();
  1253. return debugExpression;
  1254. }
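// Emit a DebugDeclare binding a debug local variable to the pointer that backs it.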
  1255. Id Builder::makeDebugDeclare(Id const debugLocalVariable, Id const pointer)
  1256. {
  1257. Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1258. inst->reserveOperands(5);
  1259. inst->addIdOperand(nonSemanticShaderDebugInfo);
  1260. inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugDeclare);
  1261. inst->addIdOperand(debugLocalVariable); // debug local variable id
  1262. inst->addIdOperand(pointer); // pointer to local variable id
  1263. inst->addIdOperand(makeDebugExpression()); // expression id
  1264. addInstruction(std::unique_ptr<Instruction>(inst));
  1265. return inst->getResultId();
  1266. }
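// Emit a DebugValue recording the value currently held by a debug local variable.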
  1267. Id Builder::makeDebugValue(Id const debugLocalVariable, Id const value)
  1268. {
  1269. Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1270. inst->reserveOperands(5);
  1271. inst->addIdOperand(nonSemanticShaderDebugInfo);
  1272. inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugValue);
  1273. inst->addIdOperand(debugLocalVariable); // debug local variable id
  1274. inst->addIdOperand(value); // value of local variable id
  1275. inst->addIdOperand(makeDebugExpression()); // expression id
  1276. addInstruction(std::unique_ptr<Instruction>(inst));
  1277. return inst->getResultId();
  1278. }
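// Return the single OpTypeAccelerationStructureKHR type, creating it (and, when debug info is enabled,
// an opaque debug type for it) on first use.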
  1279. Id Builder::makeAccelerationStructureType()
  1280. {
  1281. Instruction *type;
  1282. if (groupedTypes[enumCast(Op::OpTypeAccelerationStructureKHR)].size() == 0) {
  1283. type = new Instruction(getUniqueId(), NoType, Op::OpTypeAccelerationStructureKHR);
  1284. groupedTypes[enumCast(Op::OpTypeAccelerationStructureKHR)].push_back(type);
  1285. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1286. module.mapInstruction(type);
  1287. if (emitNonSemanticShaderDebugInfo) {
  1288. spv::Id debugType = makeOpaqueDebugType("accelerationStructure");
  1289. debugTypeIdLookup[type->getResultId()] = debugType;
  1290. }
  1291. } else {
  1292. type = groupedTypes[enumCast(Op::OpTypeAccelerationStructureKHR)].back();
  1293. }
  1294. return type->getResultId();
  1295. }
  1296. Id Builder::makeRayQueryType()
  1297. {
  1298. Instruction *type;
  1299. if (groupedTypes[enumCast(Op::OpTypeRayQueryKHR)].size() == 0) {
  1300. type = new Instruction(getUniqueId(), NoType, Op::OpTypeRayQueryKHR);
  1301. groupedTypes[enumCast(Op::OpTypeRayQueryKHR)].push_back(type);
  1302. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1303. module.mapInstruction(type);
  1304. if (emitNonSemanticShaderDebugInfo) {
  1305. spv::Id debugType = makeOpaqueDebugType("rayQuery");
  1306. debugTypeIdLookup[type->getResultId()] = debugType;
  1307. }
  1308. } else {
  1309. type = groupedTypes[enumCast(Op::OpTypeRayQueryKHR)].back();
  1310. }
  1311. return type->getResultId();
  1312. }
  1313. Id Builder::makeHitObjectEXTType()
  1314. {
  1315. Instruction *type;
  1316. if (groupedTypes[enumCast(Op::OpTypeHitObjectEXT)].size() == 0) {
  1317. type = new Instruction(getUniqueId(), NoType, Op::OpTypeHitObjectEXT);
  1318. groupedTypes[enumCast(Op::OpTypeHitObjectEXT)].push_back(type);
  1319. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1320. module.mapInstruction(type);
  1321. } else {
  1322. type = groupedTypes[enumCast(Op::OpTypeHitObjectEXT)].back();
  1323. }
  1324. return type->getResultId();
  1325. }
  1326. Id Builder::makeHitObjectNVType()
  1327. {
  1328. Instruction *type;
  1329. if (groupedTypes[enumCast(Op::OpTypeHitObjectNV)].size() == 0) {
  1330. type = new Instruction(getUniqueId(), NoType, Op::OpTypeHitObjectNV);
  1331. groupedTypes[enumCast(Op::OpTypeHitObjectNV)].push_back(type);
  1332. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1333. module.mapInstruction(type);
  1334. if (emitNonSemanticShaderDebugInfo) {
  1335. spv::Id debugType = makeOpaqueDebugType("hitObjectNV");
  1336. debugTypeIdLookup[type->getResultId()] = debugType;
  1337. }
  1338. } else {
  1339. type = groupedTypes[enumCast(Op::OpTypeHitObjectNV)].back();
  1340. }
  1341. return type->getResultId();
  1342. }
  1343. Id Builder::getDerefTypeId(Id resultId) const
  1344. {
  1345. Id typeId = getTypeId(resultId);
  1346. assert(isPointerType(typeId));
  1347. return module.getInstruction(typeId)->getIdOperand(1);
  1348. }
  1349. Op Builder::getMostBasicTypeClass(Id typeId) const
  1350. {
  1351. Instruction* instr = module.getInstruction(typeId);
  1352. Op typeClass = instr->getOpCode();
  1353. switch (typeClass)
  1354. {
  1355. case Op::OpTypeVector:
  1356. case Op::OpTypeMatrix:
  1357. case Op::OpTypeArray:
  1358. case Op::OpTypeRuntimeArray:
  1359. return getMostBasicTypeClass(instr->getIdOperand(0));
  1360. case Op::OpTypePointer:
  1361. return getMostBasicTypeClass(instr->getIdOperand(1));
  1362. default:
  1363. return typeClass;
  1364. }
  1365. }
  1366. unsigned int Builder::getNumTypeConstituents(Id typeId) const
  1367. {
  1368. Instruction* instr = module.getInstruction(typeId);
  1369. switch (instr->getOpCode())
  1370. {
  1371. case Op::OpTypeBool:
  1372. case Op::OpTypeInt:
  1373. case Op::OpTypeFloat:
  1374. case Op::OpTypePointer:
  1375. return 1;
  1376. case Op::OpTypeVector:
  1377. case Op::OpTypeMatrix:
  1378. return instr->getImmediateOperand(1);
  1379. case Op::OpTypeCooperativeVectorNV:
  1380. case Op::OpTypeArray:
  1381. {
  1382. Id lengthId = instr->getIdOperand(1);
  1383. return module.getInstruction(lengthId)->getImmediateOperand(0);
  1384. }
  1385. case Op::OpTypeStruct:
  1386. return instr->getNumOperands();
  1387. case Op::OpTypeCooperativeMatrixKHR:
  1388. case Op::OpTypeCooperativeMatrixNV:
  1389. // has only one constituent when used with OpCompositeConstruct.
  1390. return 1;
  1391. default:
  1392. assert(0);
  1393. return 1;
  1394. }
  1395. }
1396. // Return the lowest-level scalar type that a homogeneous composite is made out of.
1397. // Typically, this is just to find out whether something is made out of ints or floats.
1398. // However, it can also return a structure, e.g. when the type is an array of structures.
  1399. Id Builder::getScalarTypeId(Id typeId) const
  1400. {
  1401. Instruction* instr = module.getInstruction(typeId);
  1402. Op typeClass = instr->getOpCode();
  1403. switch (typeClass)
  1404. {
  1405. case Op::OpTypeVoid:
  1406. case Op::OpTypeBool:
  1407. case Op::OpTypeInt:
  1408. case Op::OpTypeFloat:
  1409. case Op::OpTypeStruct:
  1410. return instr->getResultId();
  1411. case Op::OpTypeVector:
  1412. case Op::OpTypeMatrix:
  1413. case Op::OpTypeArray:
  1414. case Op::OpTypeRuntimeArray:
  1415. case Op::OpTypePointer:
  1416. case Op::OpTypeCooperativeVectorNV:
  1417. return getScalarTypeId(getContainedTypeId(typeId));
  1418. default:
  1419. assert(0);
  1420. return NoResult;
  1421. }
  1422. }
  1423. // Return the type of 'member' of a composite.
  1424. Id Builder::getContainedTypeId(Id typeId, int member) const
  1425. {
  1426. Instruction* instr = module.getInstruction(typeId);
  1427. Op typeClass = instr->getOpCode();
  1428. switch (typeClass)
  1429. {
  1430. case Op::OpTypeVector:
  1431. case Op::OpTypeMatrix:
  1432. case Op::OpTypeArray:
  1433. case Op::OpTypeRuntimeArray:
  1434. case Op::OpTypeCooperativeMatrixKHR:
  1435. case Op::OpTypeCooperativeMatrixNV:
  1436. case Op::OpTypeCooperativeVectorNV:
  1437. return instr->getIdOperand(0);
  1438. case Op::OpTypePointer:
  1439. return instr->getIdOperand(1);
  1440. case Op::OpTypeStruct:
  1441. return instr->getIdOperand(member);
  1442. default:
  1443. assert(0);
  1444. return NoResult;
  1445. }
  1446. }
  1447. // Figure out the final resulting type of the access chain.
  1448. Id Builder::getResultingAccessChainType() const
  1449. {
  1450. assert(accessChain.base != NoResult);
  1451. Id typeId = getTypeId(accessChain.base);
  1452. assert(isPointerType(typeId));
  1453. typeId = getContainedTypeId(typeId);
  1454. for (int i = 0; i < (int)accessChain.indexChain.size(); ++i) {
  1455. if (isStructType(typeId)) {
  1456. assert(isConstantScalar(accessChain.indexChain[i]));
  1457. typeId = getContainedTypeId(typeId, getConstantScalar(accessChain.indexChain[i]));
  1458. } else
  1459. typeId = getContainedTypeId(typeId, accessChain.indexChain[i]);
  1460. }
  1461. return typeId;
  1462. }
  1463. // Return the immediately contained type of a given composite type.
  1464. Id Builder::getContainedTypeId(Id typeId) const
  1465. {
  1466. return getContainedTypeId(typeId, 0);
  1467. }
  1468. // Returns true if 'typeId' is or contains a scalar type declared with 'typeOp'
  1469. // of width 'width'. The 'width' is only consumed for int and float types.
  1470. // Returns false otherwise.
  1471. bool Builder::containsType(Id typeId, spv::Op typeOp, unsigned int width) const
  1472. {
  1473. const Instruction& instr = *module.getInstruction(typeId);
  1474. Op typeClass = instr.getOpCode();
  1475. switch (typeClass)
  1476. {
  1477. case Op::OpTypeInt:
  1478. case Op::OpTypeFloat:
  1479. return typeClass == typeOp && instr.getImmediateOperand(0) == width;
  1480. case Op::OpTypeStruct:
  1481. for (int m = 0; m < instr.getNumOperands(); ++m) {
  1482. if (containsType(instr.getIdOperand(m), typeOp, width))
  1483. return true;
  1484. }
  1485. return false;
  1486. case Op::OpTypePointer:
  1487. return false;
  1488. case Op::OpTypeVector:
  1489. case Op::OpTypeMatrix:
  1490. case Op::OpTypeArray:
  1491. case Op::OpTypeRuntimeArray:
  1492. return containsType(getContainedTypeId(typeId), typeOp, width);
  1493. default:
  1494. return typeClass == typeOp;
  1495. }
  1496. }
1497. // Return true if the type is a pointer to PhysicalStorageBufferEXT, or is an array or
1498. // struct that contains such a pointer. These require restrict/aliased decorations.
  1499. bool Builder::containsPhysicalStorageBufferOrArray(Id typeId) const
  1500. {
  1501. const Instruction& instr = *module.getInstruction(typeId);
  1502. Op typeClass = instr.getOpCode();
  1503. switch (typeClass)
  1504. {
  1505. case Op::OpTypePointer:
  1506. return getTypeStorageClass(typeId) == StorageClass::PhysicalStorageBufferEXT;
  1507. case Op::OpTypeArray:
  1508. return containsPhysicalStorageBufferOrArray(getContainedTypeId(typeId));
  1509. case Op::OpTypeStruct:
  1510. for (int m = 0; m < instr.getNumOperands(); ++m) {
  1511. if (containsPhysicalStorageBufferOrArray(instr.getIdOperand(m)))
  1512. return true;
  1513. }
  1514. return false;
  1515. default:
  1516. return false;
  1517. }
  1518. }
  1519. // See if a scalar constant of this type has already been created, so it
  1520. // can be reused rather than duplicated. (Required by the specification).
  1521. Id Builder::findScalarConstant(Op typeClass, Op opcode, Id typeId, unsigned value)
  1522. {
  1523. ScalarConstantKey key{ enumCast(typeClass), enumCast(opcode), typeId, value, 0 };
  1524. auto it = groupedScalarConstantResultIDs.find(key);
  1525. return (it != groupedScalarConstantResultIDs.end()) ? it->second : 0;
  1526. }
  1527. // Version of findScalarConstant (see above) for scalars that take two operands (e.g. a 'double' or 'int64').
  1528. Id Builder::findScalarConstant(Op typeClass, Op opcode, Id typeId, unsigned v1, unsigned v2)
  1529. {
  1530. ScalarConstantKey key{ enumCast(typeClass), enumCast(opcode), typeId, v1, v2 };
  1531. auto it = groupedScalarConstantResultIDs.find(key);
  1532. return (it != groupedScalarConstantResultIDs.end()) ? it->second : 0;
  1533. }
  1534. // Return true if consuming 'opcode' means consuming a constant.
  1535. // "constant" here means after final transform to executable code,
  1536. // the value consumed will be a constant, so includes specialization.
  1537. bool Builder::isConstantOpCode(Op opcode) const
  1538. {
  1539. switch (opcode) {
  1540. case Op::OpUndef:
  1541. case Op::OpConstantTrue:
  1542. case Op::OpConstantFalse:
  1543. case Op::OpConstant:
  1544. case Op::OpConstantComposite:
  1545. case Op::OpConstantCompositeReplicateEXT:
  1546. case Op::OpConstantSampler:
  1547. case Op::OpConstantNull:
  1548. case Op::OpSpecConstantTrue:
  1549. case Op::OpSpecConstantFalse:
  1550. case Op::OpSpecConstant:
  1551. case Op::OpSpecConstantComposite:
  1552. case Op::OpSpecConstantCompositeReplicateEXT:
  1553. case Op::OpSpecConstantOp:
  1554. return true;
  1555. default:
  1556. return false;
  1557. }
  1558. }
  1559. // Return true if consuming 'opcode' means consuming a specialization constant.
  1560. bool Builder::isSpecConstantOpCode(Op opcode) const
  1561. {
  1562. switch (opcode) {
  1563. case Op::OpSpecConstantTrue:
  1564. case Op::OpSpecConstantFalse:
  1565. case Op::OpSpecConstant:
  1566. case Op::OpSpecConstantComposite:
  1567. case Op::OpSpecConstantOp:
  1568. case Op::OpSpecConstantCompositeReplicateEXT:
  1569. return true;
  1570. default:
  1571. return false;
  1572. }
  1573. }
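// Find or create an OpConstantNull of the given type.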
  1574. Id Builder::makeNullConstant(Id typeId)
  1575. {
  1576. Instruction* constant;
  1577. // See if we already made it.
  1578. Id existing = NoResult;
  1579. for (int i = 0; i < (int)nullConstants.size(); ++i) {
  1580. constant = nullConstants[i];
  1581. if (constant->getTypeId() == typeId)
  1582. existing = constant->getResultId();
  1583. }
  1584. if (existing != NoResult)
  1585. return existing;
  1586. // Make it
  1587. Instruction* c = new Instruction(getUniqueId(), typeId, Op::OpConstantNull);
  1588. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1589. nullConstants.push_back(c);
  1590. module.mapInstruction(c);
  1591. return c->getResultId();
  1592. }
  1593. Id Builder::makeBoolConstant(bool b, bool specConstant)
  1594. {
  1595. Id typeId = makeBoolType();
  1596. Op opcode = specConstant ? (b ? Op::OpSpecConstantTrue : Op::OpSpecConstantFalse) : (b ? Op::OpConstantTrue : Op::OpConstantFalse);
  1597. // See if we already made it. Applies only to regular constants, because specialization constants
  1598. // must remain distinct for the purpose of applying a SpecId decoration.
  1599. if (!specConstant) {
  1600. Id existing = findScalarConstant(Op::OpTypeBool, opcode, typeId, 0);
  1601. if (existing)
  1602. return existing;
  1603. }
  1604. // Make it
  1605. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1606. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1607. module.mapInstruction(c);
  1608. Id resultId = c->getResultId();
  1609. if (!specConstant) {
  1610. ScalarConstantKey key{enumCast(Op::OpTypeBool), enumCast(opcode), typeId, 0, 0};
  1611. groupedScalarConstantResultIDs[key] = resultId;
  1612. }
  1613. return resultId;
  1614. }
  1615. Id Builder::makeIntConstant(Id typeId, unsigned value, bool specConstant)
  1616. {
  1617. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1618. // See if we already made it. Applies only to regular constants, because specialization constants
  1619. // must remain distinct for the purpose of applying a SpecId decoration.
  1620. if (! specConstant) {
  1621. Id existing = findScalarConstant(Op::OpTypeInt, opcode, typeId, value);
  1622. if (existing)
  1623. return existing;
  1624. }
  1625. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1626. c->addImmediateOperand(value);
  1627. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1628. module.mapInstruction(c);
  1629. Id resultId = c->getResultId();
  1630. if (!specConstant) {
  1631. ScalarConstantKey key{ enumCast(Op::OpTypeInt), enumCast(opcode), typeId, value, 0 };
  1632. groupedScalarConstantResultIDs[key] = resultId;
  1633. }
  1634. return resultId;
  1635. }
  1636. Id Builder::makeInt64Constant(Id typeId, unsigned long long value, bool specConstant)
  1637. {
  1638. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1639. unsigned op1 = value & 0xFFFFFFFF;
  1640. unsigned op2 = value >> 32;
  1641. // See if we already made it. Applies only to regular constants, because specialization constants
  1642. // must remain distinct for the purpose of applying a SpecId decoration.
  1643. if (! specConstant) {
  1644. Id existing = findScalarConstant(Op::OpTypeInt, opcode, typeId, op1, op2);
  1645. if (existing)
  1646. return existing;
  1647. }
  1648. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1649. c->reserveOperands(2);
  1650. c->addImmediateOperand(op1);
  1651. c->addImmediateOperand(op2);
  1652. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1653. module.mapInstruction(c);
  1654. Id resultId = c->getResultId();
  1655. if (!specConstant) {
  1656. ScalarConstantKey key{ enumCast(Op::OpTypeInt), enumCast(opcode), typeId, op1, op2 };
  1657. groupedScalarConstantResultIDs[key] = resultId;
  1658. }
  1659. return resultId;
  1660. }
  1661. Id Builder::makeFloatConstant(float f, bool specConstant)
  1662. {
  1663. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1664. Id typeId = makeFloatType(32);
  1665. union { float fl; unsigned int ui; } u;
  1666. u.fl = f;
  1667. unsigned value = u.ui;
  1668. // See if we already made it. Applies only to regular constants, because specialization constants
  1669. // must remain distinct for the purpose of applying a SpecId decoration.
  1670. if (! specConstant) {
  1671. Id existing = findScalarConstant(Op::OpTypeFloat, opcode, typeId, value);
  1672. if (existing)
  1673. return existing;
  1674. }
  1675. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1676. c->addImmediateOperand(value);
  1677. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1678. module.mapInstruction(c);
  1679. Id resultId = c->getResultId();
  1680. if (!specConstant) {
  1681. ScalarConstantKey key{ enumCast(Op::OpTypeFloat), enumCast(opcode), typeId, value, 0 };
  1682. groupedScalarConstantResultIDs[key] = resultId;
  1683. }
  1684. return resultId;
  1685. }
  1686. Id Builder::makeDoubleConstant(double d, bool specConstant)
  1687. {
  1688. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1689. Id typeId = makeFloatType(64);
  1690. union { double db; unsigned long long ull; } u;
  1691. u.db = d;
  1692. unsigned long long value = u.ull;
  1693. unsigned op1 = value & 0xFFFFFFFF;
  1694. unsigned op2 = value >> 32;
  1695. // See if we already made it. Applies only to regular constants, because specialization constants
  1696. // must remain distinct for the purpose of applying a SpecId decoration.
  1697. if (! specConstant) {
  1698. Id existing = findScalarConstant(Op::OpTypeFloat, opcode, typeId, op1, op2);
  1699. if (existing)
  1700. return existing;
  1701. }
  1702. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1703. c->reserveOperands(2);
  1704. c->addImmediateOperand(op1);
  1705. c->addImmediateOperand(op2);
  1706. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1707. module.mapInstruction(c);
  1708. Id resultId = c->getResultId();
  1709. if (!specConstant) {
  1710. ScalarConstantKey key{ enumCast(Op::OpTypeFloat), enumCast(opcode), typeId, op1, op2 };
  1711. groupedScalarConstantResultIDs[key] = resultId;
  1712. }
  1713. return resultId;
  1714. }
  1715. Id Builder::makeFloat16Constant(float f16, bool specConstant)
  1716. {
  1717. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1718. Id typeId = makeFloatType(16);
  1719. spvutils::HexFloat<spvutils::FloatProxy<float>> fVal(f16);
  1720. spvutils::HexFloat<spvutils::FloatProxy<spvutils::Float16>> f16Val(0);
  1721. fVal.castTo(f16Val, spvutils::kRoundToZero);
  1722. unsigned value = f16Val.value().getAsFloat().get_value();
  1723. // See if we already made it. Applies only to regular constants, because specialization constants
  1724. // must remain distinct for the purpose of applying a SpecId decoration.
  1725. if (!specConstant) {
  1726. Id existing = findScalarConstant(Op::OpTypeFloat, opcode, typeId, value);
  1727. if (existing)
  1728. return existing;
  1729. }
  1730. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1731. c->addImmediateOperand(value);
  1732. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1733. module.mapInstruction(c);
  1734. Id resultId = c->getResultId();
  1735. if (!specConstant) {
  1736. ScalarConstantKey key{ enumCast(Op::OpTypeFloat), enumCast(opcode), typeId, value, 0 };
  1737. groupedScalarConstantResultIDs[key] = resultId;
  1738. }
  1739. return resultId;
  1740. }
  1741. Id Builder::makeBFloat16Constant(float bf16, bool specConstant)
  1742. {
  1743. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1744. Id typeId = makeBFloat16Type();
  1745. union {
  1746. float f;
  1747. uint32_t u;
  1748. } un;
  1749. un.f = bf16;
1750. // Take the high 16 bits of the fp32 value. This is effectively round-to-zero, except for certain NaNs.
  1751. unsigned value = un.u >> 16;
  1752. // See if we already made it. Applies only to regular constants, because specialization constants
  1753. // must remain distinct for the purpose of applying a SpecId decoration.
  1754. if (!specConstant) {
  1755. Id existing = findScalarConstant(Op::OpTypeFloat, opcode, typeId, value);
  1756. if (existing)
  1757. return existing;
  1758. }
  1759. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1760. c->addImmediateOperand(value);
  1761. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1762. module.mapInstruction(c);
  1763. Id resultId = c->getResultId();
  1764. if (!specConstant) {
  1765. ScalarConstantKey key{ enumCast(Op::OpTypeFloat), enumCast(opcode), typeId, value, 0 };
  1766. groupedScalarConstantResultIDs[key] = resultId;
  1767. }
  1768. return resultId;
  1769. }
  1770. Id Builder::makeFloatE5M2Constant(float fe5m2, bool specConstant)
  1771. {
  1772. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1773. Id typeId = makeFloatE5M2Type();
  1774. spvutils::HexFloat<spvutils::FloatProxy<float>> fVal(fe5m2);
  1775. spvutils::HexFloat<spvutils::FloatProxy<spvutils::FloatE5M2>> fe5m2Val(0);
  1776. fVal.castTo(fe5m2Val, spvutils::kRoundToZero);
  1777. unsigned value = fe5m2Val.value().getAsFloat().get_value();
  1778. // See if we already made it. Applies only to regular constants, because specialization constants
  1779. // must remain distinct for the purpose of applying a SpecId decoration.
  1780. if (!specConstant) {
  1781. Id existing = findScalarConstant(Op::OpTypeFloat, opcode, typeId, value);
  1782. if (existing)
  1783. return existing;
  1784. }
  1785. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1786. c->addImmediateOperand(value);
  1787. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1788. module.mapInstruction(c);
  1789. Id resultId = c->getResultId();
  1790. if (!specConstant) {
  1791. ScalarConstantKey key{enumCast(Op::OpTypeFloat), enumCast(opcode), typeId, value, 0};
  1792. groupedScalarConstantResultIDs[key] = resultId;
  1793. }
  1794. return resultId;
  1795. }
  1796. Id Builder::makeFloatE4M3Constant(float fe4m3, bool specConstant)
  1797. {
  1798. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1799. Id typeId = makeFloatE4M3Type();
  1800. spvutils::HexFloat<spvutils::FloatProxy<float>> fVal(fe4m3);
  1801. spvutils::HexFloat<spvutils::FloatProxy<spvutils::FloatE4M3>> fe4m3Val(0);
  1802. fVal.castTo(fe4m3Val, spvutils::kRoundToZero);
  1803. unsigned value = fe4m3Val.value().getAsFloat().get_value();
  1804. // See if we already made it. Applies only to regular constants, because specialization constants
  1805. // must remain distinct for the purpose of applying a SpecId decoration.
  1806. if (!specConstant) {
  1807. Id existing = findScalarConstant(Op::OpTypeFloat, opcode, typeId, value);
  1808. if (existing)
  1809. return existing;
  1810. }
  1811. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1812. c->addImmediateOperand(value);
  1813. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1814. module.mapInstruction(c);
  1815. Id resultId = c->getResultId();
  1816. if (!specConstant) {
  1817. ScalarConstantKey key{enumCast(Op::OpTypeFloat), enumCast(opcode), typeId, value, 0};
  1818. groupedScalarConstantResultIDs[key] = resultId;
  1819. }
  1820. return resultId;
  1821. }
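// Make a floating-point constant of the given float type, dispatching on the type's bit width (16, 32, or 64).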
  1822. Id Builder::makeFpConstant(Id type, double d, bool specConstant)
  1823. {
  1824. const int width = getScalarTypeWidth(type);
  1825. assert(isFloatType(type));
  1826. switch (width) {
  1827. case 16:
  1828. return makeFloat16Constant((float)d, specConstant);
  1829. case 32:
  1830. return makeFloatConstant((float)d, specConstant);
  1831. case 64:
  1832. return makeDoubleConstant(d, specConstant);
  1833. default:
  1834. break;
  1835. }
  1836. assert(false);
  1837. return NoResult;
  1838. }
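// Import the NonSemantic.Shader.DebugInfo.100 extended instruction set on first use, adding the
// required extension, and return its id.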
  1839. Id Builder::importNonSemanticShaderDebugInfoInstructions()
  1840. {
1841. assert(emitNonSemanticShaderDebugInfo);
1842. if (nonSemanticShaderDebugInfo == 0)
  1843. {
  1844. this->addExtension(spv::E_SPV_KHR_non_semantic_info);
  1845. nonSemanticShaderDebugInfo = this->import("NonSemantic.Shader.DebugInfo.100");
  1846. }
  1847. return nonSemanticShaderDebugInfo;
  1848. }
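// See if a non-struct composite constant with this type, opcode, and member list has already been
// created, so it can be reused.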
  1849. Id Builder::findCompositeConstant(Op typeClass, Op opcode, Id typeId, const std::vector<Id>& comps, size_t numMembers)
  1850. {
  1851. Instruction* constant = nullptr;
  1852. bool found = false;
  1853. for (int i = 0; i < (int)groupedCompositeConstants[enumCast(typeClass)].size(); ++i) {
  1854. constant = groupedCompositeConstants[enumCast(typeClass)][i];
  1855. if (constant->getTypeId() != typeId)
  1856. continue;
  1857. if (constant->getOpCode() != opcode) {
  1858. continue;
  1859. }
  1860. if (constant->getNumOperands() != (int)numMembers)
  1861. continue;
  1862. // same contents?
  1863. bool mismatch = false;
  1864. for (int op = 0; op < constant->getNumOperands(); ++op) {
  1865. if (constant->getIdOperand(op) != comps[op]) {
  1866. mismatch = true;
  1867. break;
  1868. }
  1869. }
  1870. if (! mismatch) {
  1871. found = true;
  1872. break;
  1873. }
  1874. }
  1875. return found ? constant->getResultId() : NoResult;
  1876. }
  1877. Id Builder::findStructConstant(Id typeId, const std::vector<Id>& comps)
  1878. {
  1879. Instruction* constant = nullptr;
  1880. bool found = false;
  1881. for (int i = 0; i < (int)groupedStructConstants[typeId].size(); ++i) {
  1882. constant = groupedStructConstants[typeId][i];
  1883. // same contents?
  1884. bool mismatch = false;
  1885. for (int op = 0; op < constant->getNumOperands(); ++op) {
  1886. if (constant->getIdOperand(op) != comps[op]) {
  1887. mismatch = true;
  1888. break;
  1889. }
  1890. }
  1891. if (! mismatch) {
  1892. found = true;
  1893. break;
  1894. }
  1895. }
  1896. return found ? constant->getResultId() : NoResult;
  1897. }
  1898. // Comments in header
  1899. Id Builder::makeCompositeConstant(Id typeId, const std::vector<Id>& members, bool specConstant)
  1900. {
  1901. assert(typeId);
  1902. Op typeClass = getTypeClass(typeId);
  1903. bool replicate = false;
  1904. size_t numMembers = members.size();
  1905. if (useReplicatedComposites || typeClass == Op::OpTypeCooperativeVectorNV) {
  1906. // use replicate if all members are the same
  1907. replicate = numMembers > 0 &&
  1908. std::equal(members.begin() + 1, members.end(), members.begin());
  1909. if (replicate) {
  1910. numMembers = 1;
  1911. addCapability(spv::Capability::ReplicatedCompositesEXT);
  1912. addExtension(spv::E_SPV_EXT_replicated_composites);
  1913. }
  1914. }
  1915. Op opcode = replicate ?
  1916. (specConstant ? Op::OpSpecConstantCompositeReplicateEXT : Op::OpConstantCompositeReplicateEXT) :
  1917. (specConstant ? Op::OpSpecConstantComposite : Op::OpConstantComposite);
  1918. switch (typeClass) {
  1919. case Op::OpTypeVector:
  1920. case Op::OpTypeArray:
  1921. case Op::OpTypeMatrix:
  1922. case Op::OpTypeCooperativeMatrixKHR:
  1923. case Op::OpTypeCooperativeMatrixNV:
  1924. case Op::OpTypeCooperativeVectorNV:
  1925. if (! specConstant) {
  1926. Id existing = findCompositeConstant(typeClass, opcode, typeId, members, numMembers);
  1927. if (existing)
  1928. return existing;
  1929. }
  1930. break;
  1931. case Op::OpTypeStruct:
  1932. if (! specConstant) {
  1933. Id existing = findStructConstant(typeId, members);
  1934. if (existing)
  1935. return existing;
  1936. }
  1937. break;
  1938. default:
  1939. assert(0);
  1940. return makeFloatConstant(0.0);
  1941. }
  1942. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1943. c->reserveOperands(members.size());
  1944. for (size_t op = 0; op < numMembers; ++op)
  1945. c->addIdOperand(members[op]);
  1946. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1947. if (typeClass == Op::OpTypeStruct)
  1948. groupedStructConstants[typeId].push_back(c);
  1949. else
  1950. groupedCompositeConstants[enumCast(typeClass)].push_back(c);
  1951. module.mapInstruction(c);
  1952. return c->getResultId();
  1953. }
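// Create the OpEntryPoint instruction for the given execution model, function, and entry-point name.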
  1954. Instruction* Builder::addEntryPoint(ExecutionModel model, Function* function, const char* name)
  1955. {
  1956. Instruction* entryPoint = new Instruction(Op::OpEntryPoint);
  1957. entryPoint->reserveOperands(3);
  1958. entryPoint->addImmediateOperand(model);
  1959. entryPoint->addIdOperand(function->getId());
  1960. entryPoint->addStringOperand(name);
  1961. entryPoints.push_back(std::unique_ptr<Instruction>(entryPoint));
  1962. return entryPoint;
  1963. }
1964. // Currently relying on the fact that all 'value' arguments of interest are small non-negative values.
  1965. void Builder::addExecutionMode(Function* entryPoint, ExecutionMode mode, int value1, int value2, int value3)
  1966. {
  1967. // entryPoint can be null if we are in compile-only mode
  1968. if (!entryPoint)
  1969. return;
  1970. Instruction* instr = new Instruction(Op::OpExecutionMode);
  1971. instr->reserveOperands(3);
  1972. instr->addIdOperand(entryPoint->getId());
  1973. instr->addImmediateOperand(mode);
  1974. if (value1 >= 0)
  1975. instr->addImmediateOperand(value1);
  1976. if (value2 >= 0)
  1977. instr->addImmediateOperand(value2);
  1978. if (value3 >= 0)
  1979. instr->addImmediateOperand(value3);
  1980. executionModes.push_back(std::unique_ptr<Instruction>(instr));
  1981. }
  1982. void Builder::addExecutionMode(Function* entryPoint, ExecutionMode mode, const std::vector<unsigned>& literals)
  1983. {
  1984. // entryPoint can be null if we are in compile-only mode
  1985. if (!entryPoint)
  1986. return;
  1987. Instruction* instr = new Instruction(Op::OpExecutionMode);
  1988. instr->reserveOperands(literals.size() + 2);
  1989. instr->addIdOperand(entryPoint->getId());
  1990. instr->addImmediateOperand(mode);
  1991. for (auto literal : literals)
  1992. instr->addImmediateOperand(literal);
  1993. executionModes.push_back(std::unique_ptr<Instruction>(instr));
  1994. }
  1995. void Builder::addExecutionModeId(Function* entryPoint, ExecutionMode mode, const std::vector<Id>& operandIds)
  1996. {
  1997. // entryPoint can be null if we are in compile-only mode
  1998. if (!entryPoint)
  1999. return;
  2000. Instruction* instr = new Instruction(Op::OpExecutionModeId);
  2001. instr->reserveOperands(operandIds.size() + 2);
  2002. instr->addIdOperand(entryPoint->getId());
  2003. instr->addImmediateOperand(mode);
  2004. for (auto operandId : operandIds)
  2005. instr->addIdOperand(operandId);
  2006. executionModes.push_back(std::unique_ptr<Instruction>(instr));
  2007. }
  2008. void Builder::addName(Id id, const char* string)
  2009. {
  2010. Instruction* name = new Instruction(Op::OpName);
  2011. name->reserveOperands(2);
  2012. name->addIdOperand(id);
  2013. name->addStringOperand(string);
  2014. names.push_back(std::unique_ptr<Instruction>(name));
  2015. }
  2016. void Builder::addMemberName(Id id, int memberNumber, const char* string)
  2017. {
  2018. Instruction* name = new Instruction(Op::OpMemberName);
  2019. name->reserveOperands(3);
  2020. name->addIdOperand(id);
  2021. name->addImmediateOperand(memberNumber);
  2022. name->addStringOperand(string);
  2023. names.push_back(std::unique_ptr<Instruction>(name));
  2024. }
  2025. void Builder::addDecoration(Id id, Decoration decoration, int num)
  2026. {
  2027. if (decoration == spv::Decoration::Max)
  2028. return;
  2029. Instruction* dec = new Instruction(Op::OpDecorate);
  2030. dec->reserveOperands(2);
  2031. dec->addIdOperand(id);
  2032. dec->addImmediateOperand(decoration);
  2033. if (num >= 0)
  2034. dec->addImmediateOperand(num);
  2035. decorations.insert(std::unique_ptr<Instruction>(dec));
  2036. }
  2037. void Builder::addDecoration(Id id, Decoration decoration, const char* s)
  2038. {
  2039. if (decoration == spv::Decoration::Max)
  2040. return;
  2041. Instruction* dec = new Instruction(Op::OpDecorateString);
  2042. dec->reserveOperands(3);
  2043. dec->addIdOperand(id);
  2044. dec->addImmediateOperand(decoration);
  2045. dec->addStringOperand(s);
  2046. decorations.insert(std::unique_ptr<Instruction>(dec));
  2047. }
  2048. void Builder::addDecoration(Id id, Decoration decoration, const std::vector<unsigned>& literals)
  2049. {
  2050. if (decoration == spv::Decoration::Max)
  2051. return;
  2052. Instruction* dec = new Instruction(Op::OpDecorate);
  2053. dec->reserveOperands(literals.size() + 2);
  2054. dec->addIdOperand(id);
  2055. dec->addImmediateOperand(decoration);
  2056. for (auto literal : literals)
  2057. dec->addImmediateOperand(literal);
  2058. decorations.insert(std::unique_ptr<Instruction>(dec));
  2059. }
  2060. void Builder::addDecoration(Id id, Decoration decoration, const std::vector<const char*>& strings)
  2061. {
  2062. if (decoration == spv::Decoration::Max)
  2063. return;
  2064. Instruction* dec = new Instruction(Op::OpDecorateString);
  2065. dec->reserveOperands(strings.size() + 2);
  2066. dec->addIdOperand(id);
  2067. dec->addImmediateOperand(decoration);
  2068. for (auto string : strings)
  2069. dec->addStringOperand(string);
  2070. decorations.insert(std::unique_ptr<Instruction>(dec));
  2071. }
  2072. void Builder::addLinkageDecoration(Id id, const char* name, spv::LinkageType linkType) {
  2073. Instruction* dec = new Instruction(Op::OpDecorate);
  2074. dec->reserveOperands(4);
  2075. dec->addIdOperand(id);
  2076. dec->addImmediateOperand(spv::Decoration::LinkageAttributes);
  2077. dec->addStringOperand(name);
  2078. dec->addImmediateOperand(linkType);
  2079. decorations.insert(std::unique_ptr<Instruction>(dec));
  2080. }
  2081. void Builder::addDecorationId(Id id, Decoration decoration, Id idDecoration)
  2082. {
  2083. if (decoration == spv::Decoration::Max)
  2084. return;
  2085. Instruction* dec = new Instruction(Op::OpDecorateId);
  2086. dec->reserveOperands(3);
  2087. dec->addIdOperand(id);
  2088. dec->addImmediateOperand(decoration);
  2089. dec->addIdOperand(idDecoration);
  2090. decorations.insert(std::unique_ptr<Instruction>(dec));
  2091. }
  2092. void Builder::addDecorationId(Id id, Decoration decoration, const std::vector<Id>& operandIds)
  2093. {
2094. if (decoration == spv::Decoration::Max)
  2095. return;
  2096. Instruction* dec = new Instruction(Op::OpDecorateId);
  2097. dec->reserveOperands(operandIds.size() + 2);
  2098. dec->addIdOperand(id);
  2099. dec->addImmediateOperand(decoration);
  2100. for (auto operandId : operandIds)
  2101. dec->addIdOperand(operandId);
  2102. decorations.insert(std::unique_ptr<Instruction>(dec));
  2103. }
  2104. void Builder::addMemberDecoration(Id id, unsigned int member, Decoration decoration, int num)
  2105. {
  2106. if (decoration == spv::Decoration::Max)
  2107. return;
  2108. Instruction* dec = new Instruction(Op::OpMemberDecorate);
  2109. dec->reserveOperands(3);
  2110. dec->addIdOperand(id);
  2111. dec->addImmediateOperand(member);
  2112. dec->addImmediateOperand(decoration);
  2113. if (num >= 0)
  2114. dec->addImmediateOperand(num);
  2115. decorations.insert(std::unique_ptr<Instruction>(dec));
  2116. }
  2117. void Builder::addMemberDecoration(Id id, unsigned int member, Decoration decoration, const char *s)
  2118. {
  2119. if (decoration == spv::Decoration::Max)
  2120. return;
  2121. Instruction* dec = new Instruction(Op::OpMemberDecorateStringGOOGLE);
  2122. dec->reserveOperands(4);
  2123. dec->addIdOperand(id);
  2124. dec->addImmediateOperand(member);
  2125. dec->addImmediateOperand(decoration);
  2126. dec->addStringOperand(s);
  2127. decorations.insert(std::unique_ptr<Instruction>(dec));
  2128. }
  2129. void Builder::addMemberDecoration(Id id, unsigned int member, Decoration decoration, const std::vector<unsigned>& literals)
  2130. {
  2131. if (decoration == spv::Decoration::Max)
  2132. return;
  2133. Instruction* dec = new Instruction(Op::OpMemberDecorate);
  2134. dec->reserveOperands(literals.size() + 3);
  2135. dec->addIdOperand(id);
  2136. dec->addImmediateOperand(member);
  2137. dec->addImmediateOperand(decoration);
  2138. for (auto literal : literals)
  2139. dec->addImmediateOperand(literal);
  2140. decorations.insert(std::unique_ptr<Instruction>(dec));
  2141. }
  2142. void Builder::addMemberDecoration(Id id, unsigned int member, Decoration decoration, const std::vector<const char*>& strings)
  2143. {
  2144. if (decoration == spv::Decoration::Max)
  2145. return;
  2146. Instruction* dec = new Instruction(Op::OpMemberDecorateString);
  2147. dec->reserveOperands(strings.size() + 3);
  2148. dec->addIdOperand(id);
  2149. dec->addImmediateOperand(member);
  2150. dec->addImmediateOperand(decoration);
  2151. for (auto string : strings)
  2152. dec->addStringOperand(string);
  2153. decorations.insert(std::unique_ptr<Instruction>(dec));
  2154. }
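// Illustrative sketch (not part of the original file; ids and offsets are hypothetical):
// a caller laying out two members of a uniform block with the helpers above.
//
//   builder.addMemberDecoration(blockTypeId, /*member*/ 0, spv::Decoration::Offset, 0);
//   builder.addMemberDecoration(blockTypeId, /*member*/ 1, spv::Decoration::Offset, 16);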
  2155. void Builder::addInstruction(std::unique_ptr<Instruction> inst) {
// Phis must appear first in their block; don't insert line-tracking instructions
// in front of them, just add the OpPhi and return.
  2158. if (inst->getOpCode() == Op::OpPhi) {
  2159. buildPoint->addInstruction(std::move(inst));
  2160. return;
  2161. }
  2162. // Optionally insert OpDebugScope
  2163. if (emitNonSemanticShaderDebugInfo && dirtyScopeTracker) {
  2164. if (buildPoint->updateDebugScope(currentDebugScopeId.top())) {
  2165. auto scopeInst = std::make_unique<Instruction>(getUniqueId(), makeVoidType(), Op::OpExtInst);
  2166. scopeInst->reserveOperands(3);
  2167. scopeInst->addIdOperand(nonSemanticShaderDebugInfo);
  2168. scopeInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugScope);
  2169. scopeInst->addIdOperand(currentDebugScopeId.top());
  2170. buildPoint->addInstruction(std::move(scopeInst));
  2171. }
  2172. dirtyScopeTracker = false;
  2173. }
  2174. // Insert OpLine/OpDebugLine if the debug source location has changed
  2175. if (trackDebugInfo && dirtyLineTracker) {
  2176. if (buildPoint->updateDebugSourceLocation(currentLine, 0, currentFileId)) {
  2177. if (emitSpirvDebugInfo) {
  2178. auto lineInst = std::make_unique<Instruction>(Op::OpLine);
  2179. lineInst->reserveOperands(3);
  2180. lineInst->addIdOperand(currentFileId);
  2181. lineInst->addImmediateOperand(currentLine);
  2182. lineInst->addImmediateOperand(0);
  2183. buildPoint->addInstruction(std::move(lineInst));
  2184. }
  2185. if (emitNonSemanticShaderDebugInfo) {
  2186. auto lineInst = std::make_unique<Instruction>(getUniqueId(), makeVoidType(), Op::OpExtInst);
  2187. lineInst->reserveOperands(7);
  2188. lineInst->addIdOperand(nonSemanticShaderDebugInfo);
  2189. lineInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugLine);
  2190. lineInst->addIdOperand(makeDebugSource(currentFileId));
  2191. lineInst->addIdOperand(makeUintConstant(currentLine));
  2192. lineInst->addIdOperand(makeUintConstant(currentLine));
  2193. lineInst->addIdOperand(makeUintConstant(0));
  2194. lineInst->addIdOperand(makeUintConstant(0));
  2195. buildPoint->addInstruction(std::move(lineInst));
  2196. }
  2197. }
  2198. dirtyLineTracker = false;
  2199. }
  2200. buildPoint->addInstruction(std::move(inst));
  2201. }
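// Illustrative sketch (not original code; ids are hypothetical): the pattern used
// throughout this file for emitting through addInstruction(), which inserts
// OpLine/DebugLine and DebugScope bookkeeping as needed before the instruction:
//
//   Instruction* op = new Instruction(getUniqueId(), resultTypeId, Op::OpFNegate);
//   op->addIdOperand(operandId);
//   addInstruction(std::unique_ptr<Instruction>(op));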
  2202. void Builder::addInstructionNoDebugInfo(std::unique_ptr<Instruction> inst) {
  2203. buildPoint->addInstruction(std::move(inst));
  2204. }
  2205. // Comments in header
  2206. Function* Builder::makeEntryPoint(const char* entryPoint)
  2207. {
  2208. assert(! entryPointFunction);
  2209. auto const returnType = makeVoidType();
  2210. restoreNonSemanticShaderDebugInfo = emitNonSemanticShaderDebugInfo;
  2211. if(sourceLang == spv::SourceLanguage::HLSL) {
  2212. emitNonSemanticShaderDebugInfo = false;
  2213. }
  2214. Block* entry = nullptr;
  2215. entryPointFunction = makeFunctionEntry(NoPrecision, returnType, entryPoint, LinkageType::Max, {}, {}, &entry);
  2216. emitNonSemanticShaderDebugInfo = restoreNonSemanticShaderDebugInfo;
  2217. return entryPointFunction;
  2218. }
  2219. // Comments in header
  2220. Function* Builder::makeFunctionEntry(Decoration precision, Id returnType, const char* name, LinkageType linkType,
  2221. const std::vector<Id>& paramTypes,
  2222. const std::vector<std::vector<Decoration>>& decorations, Block** entry)
  2223. {
  2224. // Make the function and initial instructions in it
  2225. Id typeId = makeFunctionType(returnType, paramTypes);
  2226. Id firstParamId = paramTypes.size() == 0 ? 0 : getUniqueIds((int)paramTypes.size());
  2227. Id funcId = getUniqueId();
  2228. Function* function = new Function(funcId, returnType, typeId, firstParamId, linkType, name, module);
  2229. // Set up the precisions
  2230. setPrecision(function->getId(), precision);
  2231. function->setReturnPrecision(precision);
  2232. for (unsigned p = 0; p < (unsigned)decorations.size(); ++p) {
  2233. for (int d = 0; d < (int)decorations[p].size(); ++d) {
  2234. addDecoration(firstParamId + p, decorations[p][d]);
  2235. function->addParamPrecision(p, decorations[p][d]);
  2236. }
  2237. }
  2238. // reset last debug scope
  2239. if (emitNonSemanticShaderDebugInfo) {
  2240. dirtyScopeTracker = true;
  2241. }
  2242. // CFG
  2243. assert(entry != nullptr);
  2244. *entry = new Block(getUniqueId(), *function);
  2245. function->addBlock(*entry);
  2246. setBuildPoint(*entry);
  2247. if (name)
  2248. addName(function->getId(), name);
  2249. functions.push_back(std::unique_ptr<Function>(function));
  2250. return function;
  2251. }
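// Illustrative sketch (an assumption about typical caller usage, not original code):
// creating a small void helper function; the returned entry Block becomes the build point.
// The name is only illustrative.
//
//   spv::Block* entry = nullptr;
//   spv::Function* fn = builder.makeFunctionEntry(spv::NoPrecision, builder.makeVoidType(),
//                                                 "helper(", spv::LinkageType::Max, {}, {}, &entry);
//   // ... emit the body ...
//   builder.makeReturn(/*implicit*/ true);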
  2252. void Builder::setupFunctionDebugInfo(Function* function, const char* name, const std::vector<Id>& paramTypes,
  2253. const std::vector<char const*>& paramNames)
  2254. {
  2255. if (!emitNonSemanticShaderDebugInfo)
  2256. return;
  2257. Id nameId = getStringId(unmangleFunctionName(name));
  2258. Id funcTypeId = function->getFuncTypeId();
  2259. assert(getDebugType(funcTypeId) != NoType);
  2260. Id funcId = function->getId();
  2261. assert(funcId != 0);
  2262. // Make the debug function instruction
  2263. Id debugFuncId = makeDebugFunction(function, nameId, funcTypeId);
  2264. debugFuncIdLookup[funcId] = debugFuncId;
  2265. currentDebugScopeId.push(debugFuncId);
  2266. // DebugScope and DebugLine for parameter DebugDeclares
  2267. assert(paramTypes.size() == paramNames.size());
  2268. if ((int)paramTypes.size() > 0) {
  2269. Id firstParamId = function->getParamId(0);
  2270. for (size_t p = 0; p < paramTypes.size(); ++p) {
  2271. bool passByRef = false;
  2272. Id paramTypeId = paramTypes[p];
// Pointer-typed parameters are actually passed by reference, so we need to unwrap the pointer to get the actual parameter type.
  2274. if (isPointerType(paramTypeId) || isArrayType(paramTypeId)) {
  2275. passByRef = true;
  2276. paramTypeId = getContainedTypeId(paramTypeId);
  2277. }
  2278. auto const& paramName = paramNames[p];
  2279. auto const debugLocalVariableId = createDebugLocalVariable(getDebugType(paramTypeId), paramName, p + 1);
  2280. auto const paramId = static_cast<Id>(firstParamId + p);
  2281. if (passByRef) {
  2282. makeDebugDeclare(debugLocalVariableId, paramId);
  2283. } else {
  2284. makeDebugValue(debugLocalVariableId, paramId);
  2285. }
  2286. }
  2287. }
  2288. // Clear debug scope stack
  2289. if (emitNonSemanticShaderDebugInfo)
  2290. currentDebugScopeId.pop();
  2291. }
  2292. Id Builder::makeDebugFunction([[maybe_unused]] Function* function, Id nameId, Id funcTypeId)
  2293. {
  2294. assert(function != nullptr);
  2295. assert(nameId != 0);
  2296. assert(funcTypeId != 0);
  2297. assert(getDebugType(funcTypeId) != NoType);
  2298. Id funcId = getUniqueId();
  2299. auto type = new Instruction(funcId, makeVoidType(), Op::OpExtInst);
  2300. type->reserveOperands(11);
  2301. type->addIdOperand(nonSemanticShaderDebugInfo);
  2302. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugFunction);
  2303. type->addIdOperand(nameId);
  2304. type->addIdOperand(getDebugType(funcTypeId));
  2305. type->addIdOperand(makeDebugSource(currentFileId)); // TODO: This points to file of definition instead of declaration
  2306. type->addIdOperand(makeUintConstant(currentLine)); // TODO: This points to line of definition instead of declaration
  2307. type->addIdOperand(makeUintConstant(0)); // column
  2308. type->addIdOperand(makeDebugCompilationUnit()); // scope
  2309. type->addIdOperand(nameId); // linkage name
  2310. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsPublic));
  2311. type->addIdOperand(makeUintConstant(currentLine));
  2312. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  2313. module.mapInstruction(type);
  2314. return funcId;
  2315. }
  2316. Id Builder::makeDebugLexicalBlock(uint32_t line, uint32_t column) {
  2317. assert(!currentDebugScopeId.empty());
  2318. Id lexId = getUniqueId();
  2319. auto lex = new Instruction(lexId, makeVoidType(), Op::OpExtInst);
  2320. lex->reserveOperands(6);
  2321. lex->addIdOperand(nonSemanticShaderDebugInfo);
  2322. lex->addImmediateOperand(NonSemanticShaderDebugInfo100DebugLexicalBlock);
  2323. lex->addIdOperand(makeDebugSource(currentFileId));
  2324. lex->addIdOperand(makeUintConstant(line));
  2325. lex->addIdOperand(makeUintConstant(column)); // column
  2326. lex->addIdOperand(currentDebugScopeId.top()); // scope
  2327. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(lex));
  2328. module.mapInstruction(lex);
  2329. return lexId;
  2330. }
  2331. std::string Builder::unmangleFunctionName(std::string const& name) const
  2332. {
  2333. assert(name.length() > 0);
  2334. if(name.rfind('(') != std::string::npos) {
  2335. return name.substr(0, name.rfind('('));
  2336. } else {
  2337. return name;
  2338. }
  2339. }
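// For example, a glslang-style mangled name such as "computeLighting(vf3;vf3;" (the exact
// mangling scheme is outside this file) is returned as "computeLighting", while a name
// containing no '(' is returned unchanged.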
  2340. // Comments in header
  2341. void Builder::makeReturn(bool implicit, Id retVal)
  2342. {
  2343. if (retVal) {
  2344. Instruction* inst = new Instruction(NoResult, NoType, Op::OpReturnValue);
  2345. inst->addIdOperand(retVal);
  2346. addInstruction(std::unique_ptr<Instruction>(inst));
  2347. } else
  2348. addInstruction(std::unique_ptr<Instruction>(new Instruction(NoResult, NoType, Op::OpReturn)));
  2349. if (! implicit)
  2350. createAndSetNoPredecessorBlock("post-return");
  2351. }
  2352. // Comments in header
  2353. void Builder::enterLexicalBlock(uint32_t line, uint32_t column)
  2354. {
  2355. if (!emitNonSemanticShaderDebugInfo) {
  2356. return;
  2357. }
  2358. // Generate new lexical scope debug instruction
  2359. Id lexId = makeDebugLexicalBlock(line, column);
  2360. currentDebugScopeId.push(lexId);
  2361. dirtyScopeTracker = true;
  2362. }
  2363. // Comments in header
  2364. void Builder::leaveLexicalBlock()
  2365. {
  2366. if (!emitNonSemanticShaderDebugInfo) {
  2367. return;
  2368. }
  2369. // Pop current scope from stack and clear current scope
  2370. currentDebugScopeId.pop();
  2371. dirtyScopeTracker = true;
  2372. }
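// Illustrative sketch (not original code; line/column are hypothetical): the expected
// pairing when the front end enters and leaves a nested source scope:
//
//   builder.enterLexicalBlock(42, 5);   // push a DebugLexicalBlock scope
//   // ... emit instructions belonging to the nested scope ...
//   builder.leaveLexicalBlock();        // pop back to the enclosing scope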
  2373. // Comments in header
  2374. void Builder::enterFunction(Function const* function)
  2375. {
  2376. currentFunction = function;
  2377. // Save and disable debugInfo for HLSL entry point function. It is a wrapper
  2378. // function with no user code in it.
  2379. restoreNonSemanticShaderDebugInfo = emitNonSemanticShaderDebugInfo;
  2380. if (sourceLang == spv::SourceLanguage::HLSL && function == entryPointFunction) {
  2381. emitNonSemanticShaderDebugInfo = false;
  2382. }
  2383. if (emitNonSemanticShaderDebugInfo) {
  2384. // Initialize scope state
  2385. Id funcId = function->getFuncId();
  2386. Id debugFuncId = getDebugFunction(funcId);
  2387. currentDebugScopeId.push(debugFuncId);
  2388. // Create DebugFunctionDefinition
  2389. spv::Id resultId = getUniqueId();
  2390. Instruction* defInst = new Instruction(resultId, makeVoidType(), Op::OpExtInst);
  2391. defInst->reserveOperands(4);
  2392. defInst->addIdOperand(nonSemanticShaderDebugInfo);
  2393. defInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugFunctionDefinition);
  2394. defInst->addIdOperand(debugFuncId);
  2395. defInst->addIdOperand(funcId);
  2396. addInstruction(std::unique_ptr<Instruction>(defInst));
  2397. }
  2398. if (auto linkType = function->getLinkType(); linkType != LinkageType::Max) {
  2399. Id funcId = function->getFuncId();
  2400. addCapability(Capability::Linkage);
  2401. addLinkageDecoration(funcId, function->getExportName(), linkType);
  2402. }
  2403. }
  2404. // Comments in header
  2405. void Builder::leaveFunction()
  2406. {
  2407. Block* block = buildPoint;
  2408. Function& function = buildPoint->getParent();
  2409. assert(block);
  2410. // If our function did not contain a return, add a return void now.
  2411. if (! block->isTerminated()) {
  2412. if (function.getReturnType() == makeVoidType())
  2413. makeReturn(true);
  2414. else {
  2415. makeReturn(true, createUndefined(function.getReturnType()));
  2416. }
  2417. }
  2418. // Clear function scope from debug scope stack
  2419. if (emitNonSemanticShaderDebugInfo)
  2420. currentDebugScopeId.pop();
  2421. emitNonSemanticShaderDebugInfo = restoreNonSemanticShaderDebugInfo;
  2422. // Clear current function record
  2423. currentFunction = nullptr;
  2424. }
  2425. // Comments in header
  2426. void Builder::makeStatementTerminator(spv::Op opcode, const char *name)
  2427. {
  2428. addInstruction(std::unique_ptr<Instruction>(new Instruction(opcode)));
  2429. createAndSetNoPredecessorBlock(name);
  2430. }
  2431. // Comments in header
  2432. void Builder::makeStatementTerminator(spv::Op opcode, const std::vector<Id>& operands, const char* name)
  2433. {
// It's assumed that the terminator instruction is always of void return type.
// However, if a non-void return type is ever needed, new helper methods can be created.
  2437. createNoResultOp(opcode, operands);
  2438. createAndSetNoPredecessorBlock(name);
  2439. }
  2440. void Builder::createConstVariable(Id type, const char* name, Id constant, bool isGlobal)
  2441. {
  2442. if (emitNonSemanticShaderDebugInfo) {
  2443. Id debugType = getDebugType(type);
  2444. if (isGlobal) {
  2445. createDebugGlobalVariable(debugType, name, constant);
  2446. }
  2447. else {
  2448. auto debugLocal = createDebugLocalVariable(debugType, name);
  2449. makeDebugValue(debugLocal, constant);
  2450. }
  2451. }
  2452. }
  2453. // Comments in header
  2454. Id Builder::createVariable(Decoration precision, StorageClass storageClass, Id type, const char* name, Id initializer,
  2455. bool const compilerGenerated)
  2456. {
  2457. Id pointerType = makePointer(storageClass, type);
  2458. Instruction* inst = new Instruction(getUniqueId(), pointerType, Op::OpVariable);
  2459. inst->addImmediateOperand(storageClass);
  2460. if (initializer != NoResult)
  2461. inst->addIdOperand(initializer);
  2462. if (storageClass == StorageClass::Function) {
  2463. // Validation rules require the declaration in the entry block
  2464. buildPoint->getParent().addLocalVariable(std::unique_ptr<Instruction>(inst));
  2465. }
  2466. else {
  2467. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
  2468. module.mapInstruction(inst);
  2469. }
  2470. if (emitNonSemanticShaderDebugInfo && !compilerGenerated)
  2471. {
// For debug info, we prefer respecting how the variable is declared in source code.
// We may emulate some local variables as global variables with Private storage in SPIR-V,
// but we still want to treat them as local variables in debug info.
  2475. if (storageClass == StorageClass::Function || (currentFunction && storageClass == StorageClass::Private)) {
  2476. auto const debugLocalVariableId = createDebugLocalVariable(getDebugType(type), name);
  2477. makeDebugDeclare(debugLocalVariableId, inst->getResultId());
  2478. }
  2479. else {
  2480. createDebugGlobalVariable(getDebugType(type), name, inst->getResultId());
  2481. }
  2482. }
  2483. if (name)
  2484. addName(inst->getResultId(), name);
  2485. setPrecision(inst->getResultId(), precision);
  2486. return inst->getResultId();
  2487. }
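// Illustrative sketch (hypothetical usage, not original code): creating a function-local
// float temporary; Function-storage variables land in the function's entry block as
// required by the validation rules noted above.
//
//   spv::Id tmp = builder.createVariable(spv::NoPrecision, spv::StorageClass::Function,
//                                        builder.makeFloatType(32), "tmp", spv::NoResult,
//                                        /*compilerGenerated*/ true);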
  2488. // Comments in header
  2489. Id Builder::createUndefined(Id type)
  2490. {
  2491. Instruction* inst = new Instruction(getUniqueId(), type, Op::OpUndef);
  2492. addInstruction(std::unique_ptr<Instruction>(inst));
  2493. return inst->getResultId();
  2494. }
// The make-available/make-visible/non-private memory access bits are unnecessary and illegal for some storage classes.
  2496. spv::MemoryAccessMask Builder::sanitizeMemoryAccessForStorageClass(spv::MemoryAccessMask memoryAccess, StorageClass sc)
  2497. const
  2498. {
  2499. switch (sc) {
  2500. case spv::StorageClass::Uniform:
  2501. case spv::StorageClass::Workgroup:
  2502. case spv::StorageClass::StorageBuffer:
  2503. case spv::StorageClass::PhysicalStorageBufferEXT:
  2504. break;
  2505. default:
  2506. memoryAccess = spv::MemoryAccessMask(memoryAccess &
  2507. ~(spv::MemoryAccessMask::MakePointerAvailableKHR |
  2508. spv::MemoryAccessMask::MakePointerVisibleKHR |
  2509. spv::MemoryAccessMask::NonPrivatePointerKHR));
  2510. break;
  2511. }
  2512. return memoryAccess;
  2513. }
  2514. // Comments in header
  2515. void Builder::createStore(Id rValue, Id lValue, spv::MemoryAccessMask memoryAccess, spv::Scope scope,
  2516. unsigned int alignment)
  2517. {
  2518. Instruction* store = new Instruction(Op::OpStore);
  2519. store->reserveOperands(2);
  2520. store->addIdOperand(lValue);
  2521. store->addIdOperand(rValue);
  2522. memoryAccess = sanitizeMemoryAccessForStorageClass(memoryAccess, getStorageClass(lValue));
  2523. if (memoryAccess != MemoryAccessMask::MaskNone) {
  2524. store->addImmediateOperand(memoryAccess);
  2525. if (anySet(memoryAccess, spv::MemoryAccessMask::Aligned)) {
  2526. store->addImmediateOperand(alignment);
  2527. }
  2528. if (anySet(memoryAccess, spv::MemoryAccessMask::MakePointerAvailableKHR)) {
  2529. store->addIdOperand(makeUintConstant(scope));
  2530. }
  2531. }
  2532. addInstruction(std::unique_ptr<Instruction>(store));
  2533. }
  2534. // Comments in header
  2535. Id Builder::createLoad(Id lValue, spv::Decoration precision, spv::MemoryAccessMask memoryAccess,
  2536. spv::Scope scope, unsigned int alignment)
  2537. {
  2538. Instruction* load = new Instruction(getUniqueId(), getDerefTypeId(lValue), Op::OpLoad);
  2539. load->addIdOperand(lValue);
  2540. memoryAccess = sanitizeMemoryAccessForStorageClass(memoryAccess, getStorageClass(lValue));
  2541. if (memoryAccess != MemoryAccessMask::MaskNone) {
  2542. load->addImmediateOperand(memoryAccess);
  2543. if (anySet(memoryAccess, spv::MemoryAccessMask::Aligned)) {
  2544. load->addImmediateOperand(alignment);
  2545. }
  2546. if (anySet(memoryAccess, spv::MemoryAccessMask::MakePointerVisibleKHR)) {
  2547. load->addIdOperand(makeUintConstant(scope));
  2548. }
  2549. }
  2550. addInstruction(std::unique_ptr<Instruction>(load));
  2551. setPrecision(load->getResultId(), precision);
  2552. return load->getResultId();
  2553. }
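// Illustrative sketch (hypothetical ids, not original code): a plain read-modify-write
// through a pointer using the helpers above, with no extra memory-access operands.
//
//   spv::Id oldVal = builder.createLoad(ptrId, spv::NoPrecision,
//                                       spv::MemoryAccessMask::MaskNone, spv::Scope::Invocation, 0);
//   spv::Id newVal = builder.createBinOp(spv::Op::OpFAdd, floatTypeId, oldVal, deltaId);
//   builder.createStore(newVal, ptrId, spv::MemoryAccessMask::MaskNone, spv::Scope::Invocation, 0);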
  2554. // Comments in header
  2555. Id Builder::createAccessChain(StorageClass storageClass, Id base, const std::vector<Id>& offsets)
  2556. {
  2557. // Figure out the final resulting type.
  2558. Id typeId = getResultingAccessChainType();
  2559. typeId = makePointer(storageClass, typeId);
  2560. // Make the instruction
  2561. Instruction* chain = new Instruction(getUniqueId(), typeId, Op::OpAccessChain);
  2562. chain->reserveOperands(offsets.size() + 1);
  2563. chain->addIdOperand(base);
  2564. for (int i = 0; i < (int)offsets.size(); ++i)
  2565. chain->addIdOperand(offsets[i]);
  2566. addInstruction(std::unique_ptr<Instruction>(chain));
  2567. return chain->getResultId();
  2568. }
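// Illustrative sketch (hypothetical ids, not original code): addressing member 1 of a
// uniform block and loading it through the resulting pointer.
//
//   std::vector<spv::Id> offsets = { builder.makeIntConstant(1) };
//   spv::Id memberPtr = builder.createAccessChain(spv::StorageClass::Uniform, blockVarId, offsets);
//   spv::Id value = builder.createLoad(memberPtr, spv::NoPrecision,
//                                      spv::MemoryAccessMask::MaskNone, spv::Scope::Invocation, 0);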
  2569. Id Builder::createArrayLength(Id base, unsigned int member, unsigned int bits)
  2570. {
  2571. spv::Id intType = makeUintType(bits);
  2572. Instruction* length = new Instruction(getUniqueId(), intType, Op::OpArrayLength);
  2573. length->reserveOperands(2);
  2574. length->addIdOperand(base);
  2575. length->addImmediateOperand(member);
  2576. addInstruction(std::unique_ptr<Instruction>(length));
  2577. return length->getResultId();
  2578. }
  2579. Id Builder::createCooperativeMatrixLengthKHR(Id type)
  2580. {
  2581. spv::Id intType = makeUintType(32);
  2582. // Generate code for spec constants if in spec constant operation
  2583. // generation mode.
  2584. if (generatingOpCodeForSpecConst) {
  2585. return createSpecConstantOp(Op::OpCooperativeMatrixLengthKHR, intType, std::vector<Id>(1, type), std::vector<Id>());
  2586. }
  2587. Instruction* length = new Instruction(getUniqueId(), intType, Op::OpCooperativeMatrixLengthKHR);
  2588. length->addIdOperand(type);
  2589. addInstruction(std::unique_ptr<Instruction>(length));
  2590. return length->getResultId();
  2591. }
  2592. Id Builder::createCooperativeMatrixLengthNV(Id type)
  2593. {
  2594. spv::Id intType = makeUintType(32);
  2595. // Generate code for spec constants if in spec constant operation
  2596. // generation mode.
  2597. if (generatingOpCodeForSpecConst) {
  2598. return createSpecConstantOp(Op::OpCooperativeMatrixLengthNV, intType, std::vector<Id>(1, type), std::vector<Id>());
  2599. }
  2600. Instruction* length = new Instruction(getUniqueId(), intType, Op::OpCooperativeMatrixLengthNV);
  2601. length->addIdOperand(type);
  2602. addInstruction(std::unique_ptr<Instruction>(length));
  2603. return length->getResultId();
  2604. }
  2605. Id Builder::createCompositeExtract(Id composite, Id typeId, unsigned index)
  2606. {
  2607. // Generate code for spec constants if in spec constant operation
  2608. // generation mode.
  2609. if (generatingOpCodeForSpecConst) {
  2610. return createSpecConstantOp(Op::OpCompositeExtract, typeId, std::vector<Id>(1, composite),
  2611. std::vector<Id>(1, index));
  2612. }
  2613. Instruction* extract = new Instruction(getUniqueId(), typeId, Op::OpCompositeExtract);
  2614. extract->reserveOperands(2);
  2615. extract->addIdOperand(composite);
  2616. extract->addImmediateOperand(index);
  2617. addInstruction(std::unique_ptr<Instruction>(extract));
  2618. return extract->getResultId();
  2619. }
  2620. Id Builder::createCompositeExtract(Id composite, Id typeId, const std::vector<unsigned>& indexes)
  2621. {
  2622. // Generate code for spec constants if in spec constant operation
  2623. // generation mode.
  2624. if (generatingOpCodeForSpecConst) {
  2625. return createSpecConstantOp(Op::OpCompositeExtract, typeId, std::vector<Id>(1, composite), indexes);
  2626. }
  2627. Instruction* extract = new Instruction(getUniqueId(), typeId, Op::OpCompositeExtract);
  2628. extract->reserveOperands(indexes.size() + 1);
  2629. extract->addIdOperand(composite);
  2630. for (int i = 0; i < (int)indexes.size(); ++i)
  2631. extract->addImmediateOperand(indexes[i]);
  2632. addInstruction(std::unique_ptr<Instruction>(extract));
  2633. return extract->getResultId();
  2634. }
  2635. Id Builder::createCompositeInsert(Id object, Id composite, Id typeId, unsigned index)
  2636. {
  2637. Instruction* insert = new Instruction(getUniqueId(), typeId, Op::OpCompositeInsert);
  2638. insert->reserveOperands(3);
  2639. insert->addIdOperand(object);
  2640. insert->addIdOperand(composite);
  2641. insert->addImmediateOperand(index);
  2642. addInstruction(std::unique_ptr<Instruction>(insert));
  2643. return insert->getResultId();
  2644. }
  2645. Id Builder::createCompositeInsert(Id object, Id composite, Id typeId, const std::vector<unsigned>& indexes)
  2646. {
  2647. Instruction* insert = new Instruction(getUniqueId(), typeId, Op::OpCompositeInsert);
  2648. insert->reserveOperands(indexes.size() + 2);
  2649. insert->addIdOperand(object);
  2650. insert->addIdOperand(composite);
  2651. for (int i = 0; i < (int)indexes.size(); ++i)
  2652. insert->addImmediateOperand(indexes[i]);
  2653. addInstruction(std::unique_ptr<Instruction>(insert));
  2654. return insert->getResultId();
  2655. }
  2656. Id Builder::createVectorExtractDynamic(Id vector, Id typeId, Id componentIndex)
  2657. {
  2658. Instruction* extract = new Instruction(getUniqueId(), typeId, Op::OpVectorExtractDynamic);
  2659. extract->reserveOperands(2);
  2660. extract->addIdOperand(vector);
  2661. extract->addIdOperand(componentIndex);
  2662. addInstruction(std::unique_ptr<Instruction>(extract));
  2663. return extract->getResultId();
  2664. }
  2665. Id Builder::createVectorInsertDynamic(Id vector, Id typeId, Id component, Id componentIndex)
  2666. {
  2667. Instruction* insert = new Instruction(getUniqueId(), typeId, Op::OpVectorInsertDynamic);
  2668. insert->reserveOperands(3);
  2669. insert->addIdOperand(vector);
  2670. insert->addIdOperand(component);
  2671. insert->addIdOperand(componentIndex);
  2672. addInstruction(std::unique_ptr<Instruction>(insert));
  2673. return insert->getResultId();
  2674. }
  2675. // An opcode that has no operands, no result id, and no type
  2676. void Builder::createNoResultOp(Op opCode)
  2677. {
  2678. Instruction* op = new Instruction(opCode);
  2679. addInstruction(std::unique_ptr<Instruction>(op));
  2680. }
  2681. // An opcode that has one id operand, no result id, and no type
  2682. void Builder::createNoResultOp(Op opCode, Id operand)
  2683. {
  2684. Instruction* op = new Instruction(opCode);
  2685. op->addIdOperand(operand);
  2686. addInstruction(std::unique_ptr<Instruction>(op));
  2687. }
  2688. // An opcode that has one or more operands, no result id, and no type
  2689. void Builder::createNoResultOp(Op opCode, const std::vector<Id>& operands)
  2690. {
  2691. Instruction* op = new Instruction(opCode);
  2692. op->reserveOperands(operands.size());
  2693. for (auto id : operands) {
  2694. op->addIdOperand(id);
  2695. }
  2696. addInstruction(std::unique_ptr<Instruction>(op));
  2697. }
  2698. // An opcode that has multiple operands, no result id, and no type
  2699. void Builder::createNoResultOp(Op opCode, const std::vector<IdImmediate>& operands)
  2700. {
  2701. Instruction* op = new Instruction(opCode);
  2702. op->reserveOperands(operands.size());
  2703. for (auto it = operands.cbegin(); it != operands.cend(); ++it) {
  2704. if (it->isId)
  2705. op->addIdOperand(it->word);
  2706. else
  2707. op->addImmediateOperand(it->word);
  2708. }
  2709. addInstruction(std::unique_ptr<Instruction>(op));
  2710. }
  2711. void Builder::createControlBarrier(Scope execution, Scope memory, MemorySemanticsMask semantics)
  2712. {
  2713. Instruction* op = new Instruction(Op::OpControlBarrier);
  2714. op->reserveOperands(3);
  2715. op->addIdOperand(makeUintConstant(execution));
  2716. op->addIdOperand(makeUintConstant(memory));
  2717. op->addIdOperand(makeUintConstant(semantics));
  2718. addInstruction(std::unique_ptr<Instruction>(op));
  2719. }
  2720. void Builder::createMemoryBarrier(Scope executionScope, MemorySemanticsMask memorySemantics)
  2721. {
  2722. Instruction* op = new Instruction(Op::OpMemoryBarrier);
  2723. op->reserveOperands(2);
  2724. op->addIdOperand(makeUintConstant((unsigned)executionScope));
  2725. op->addIdOperand(makeUintConstant((unsigned)memorySemantics));
  2726. addInstruction(std::unique_ptr<Instruction>(op));
  2727. }
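// Illustrative sketch (not original code; the semantics value is a simplified assumption,
// real front ends typically also set acquire/release bits): a workgroup-wide barrier.
//
//   builder.createControlBarrier(spv::Scope::Workgroup, spv::Scope::Workgroup,
//                                spv::MemorySemanticsMask::WorkgroupMemory);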
// An opcode that has one operand, a result id, and a type
  2729. Id Builder::createUnaryOp(Op opCode, Id typeId, Id operand)
  2730. {
  2731. // Generate code for spec constants if in spec constant operation
  2732. // generation mode.
  2733. if (generatingOpCodeForSpecConst) {
  2734. return createSpecConstantOp(opCode, typeId, std::vector<Id>(1, operand), std::vector<Id>());
  2735. }
  2736. Instruction* op = new Instruction(getUniqueId(), typeId, opCode);
  2737. op->addIdOperand(operand);
  2738. addInstruction(std::unique_ptr<Instruction>(op));
  2739. return op->getResultId();
  2740. }
  2741. Id Builder::createBinOp(Op opCode, Id typeId, Id left, Id right)
  2742. {
  2743. // Generate code for spec constants if in spec constant operation
  2744. // generation mode.
  2745. if (generatingOpCodeForSpecConst) {
  2746. std::vector<Id> operands(2);
  2747. operands[0] = left; operands[1] = right;
  2748. return createSpecConstantOp(opCode, typeId, operands, std::vector<Id>());
  2749. }
  2750. Instruction* op = new Instruction(getUniqueId(), typeId, opCode);
  2751. op->reserveOperands(2);
  2752. op->addIdOperand(left);
  2753. op->addIdOperand(right);
  2754. addInstruction(std::unique_ptr<Instruction>(op));
  2755. return op->getResultId();
  2756. }
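// Illustrative sketch (hypothetical ids): the usual way scalar/vector arithmetic is
// emitted with the helper above, e.g. a single-precision add with its precision noted:
//
//   spv::Id sum = builder.createBinOp(spv::Op::OpFAdd, builder.makeFloatType(32), lhsId, rhsId);
//   builder.setPrecision(sum, precision);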
  2757. Id Builder::createTriOp(Op opCode, Id typeId, Id op1, Id op2, Id op3)
  2758. {
  2759. // Generate code for spec constants if in spec constant operation
  2760. // generation mode.
  2761. if (generatingOpCodeForSpecConst) {
  2762. std::vector<Id> operands(3);
  2763. operands[0] = op1;
  2764. operands[1] = op2;
  2765. operands[2] = op3;
  2766. return createSpecConstantOp(
  2767. opCode, typeId, operands, std::vector<Id>());
  2768. }
  2769. Instruction* op = new Instruction(getUniqueId(), typeId, opCode);
  2770. op->reserveOperands(3);
  2771. op->addIdOperand(op1);
  2772. op->addIdOperand(op2);
  2773. op->addIdOperand(op3);
  2774. addInstruction(std::unique_ptr<Instruction>(op));
  2775. return op->getResultId();
  2776. }
  2777. Id Builder::createOp(Op opCode, Id typeId, const std::vector<Id>& operands)
  2778. {
  2779. Instruction* op = new Instruction(getUniqueId(), typeId, opCode);
  2780. op->reserveOperands(operands.size());
  2781. for (auto id : operands)
  2782. op->addIdOperand(id);
  2783. addInstruction(std::unique_ptr<Instruction>(op));
  2784. return op->getResultId();
  2785. }
  2786. Id Builder::createOp(Op opCode, Id typeId, const std::vector<IdImmediate>& operands)
  2787. {
  2788. Instruction* op = new Instruction(getUniqueId(), typeId, opCode);
  2789. op->reserveOperands(operands.size());
  2790. for (auto it = operands.cbegin(); it != operands.cend(); ++it) {
  2791. if (it->isId)
  2792. op->addIdOperand(it->word);
  2793. else
  2794. op->addImmediateOperand(it->word);
  2795. }
  2796. addInstruction(std::unique_ptr<Instruction>(op));
  2797. return op->getResultId();
  2798. }
  2799. Id Builder::createSpecConstantOp(Op opCode, Id typeId, const std::vector<Id>& operands,
  2800. const std::vector<unsigned>& literals)
  2801. {
  2802. Instruction* op = new Instruction(getUniqueId(), typeId, Op::OpSpecConstantOp);
  2803. op->reserveOperands(operands.size() + literals.size() + 1);
  2804. op->addImmediateOperand((unsigned) opCode);
  2805. for (auto it = operands.cbegin(); it != operands.cend(); ++it)
  2806. op->addIdOperand(*it);
  2807. for (auto it = literals.cbegin(); it != literals.cend(); ++it)
  2808. op->addImmediateOperand(*it);
  2809. module.mapInstruction(op);
  2810. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(op));
  2811. // OpSpecConstantOp's using 8 or 16 bit types require the associated capability
  2812. if (containsType(typeId, Op::OpTypeInt, 8))
  2813. addCapability(Capability::Int8);
  2814. if (containsType(typeId, Op::OpTypeInt, 16))
  2815. addCapability(Capability::Int16);
  2816. if (containsType(typeId, Op::OpTypeFloat, 16))
  2817. addCapability(Capability::Float16);
  2818. return op->getResultId();
  2819. }
  2820. Id Builder::createFunctionCall(spv::Function* function, const std::vector<spv::Id>& args)
  2821. {
  2822. Instruction* op = new Instruction(getUniqueId(), function->getReturnType(), Op::OpFunctionCall);
  2823. op->reserveOperands(args.size() + 1);
  2824. op->addIdOperand(function->getId());
  2825. for (int a = 0; a < (int)args.size(); ++a)
  2826. op->addIdOperand(args[a]);
  2827. addInstruction(std::unique_ptr<Instruction>(op));
  2828. return op->getResultId();
  2829. }
  2830. // Comments in header
  2831. Id Builder::createRvalueSwizzle(Decoration precision, Id typeId, Id source, const std::vector<unsigned>& channels)
  2832. {
  2833. if (channels.size() == 1)
  2834. return setPrecision(createCompositeExtract(source, typeId, channels.front()), precision);
  2835. if (generatingOpCodeForSpecConst) {
  2836. std::vector<Id> operands(2);
  2837. operands[0] = operands[1] = source;
  2838. return setPrecision(createSpecConstantOp(Op::OpVectorShuffle, typeId, operands, channels), precision);
  2839. }
  2840. Instruction* swizzle = new Instruction(getUniqueId(), typeId, Op::OpVectorShuffle);
  2841. assert(isVector(source));
  2842. swizzle->reserveOperands(channels.size() + 2);
  2843. swizzle->addIdOperand(source);
  2844. swizzle->addIdOperand(source);
  2845. for (int i = 0; i < (int)channels.size(); ++i)
  2846. swizzle->addImmediateOperand(channels[i]);
  2847. addInstruction(std::unique_ptr<Instruction>(swizzle));
  2848. return setPrecision(swizzle->getResultId(), precision);
  2849. }
  2850. // Comments in header
  2851. Id Builder::createLvalueSwizzle(Id typeId, Id target, Id source, const std::vector<unsigned>& channels)
  2852. {
  2853. if (channels.size() == 1 && getNumComponents(source) == 1)
  2854. return createCompositeInsert(source, target, typeId, channels.front());
  2855. Instruction* swizzle = new Instruction(getUniqueId(), typeId, Op::OpVectorShuffle);
  2856. assert(isVector(target));
  2857. swizzle->reserveOperands(2);
  2858. swizzle->addIdOperand(target);
  2859. assert(getNumComponents(source) == channels.size());
  2860. assert(isVector(source));
  2861. swizzle->addIdOperand(source);
  2862. // Set up an identity shuffle from the base value to the result value
  2863. unsigned int components[4];
  2864. int numTargetComponents = getNumComponents(target);
  2865. for (int i = 0; i < numTargetComponents; ++i)
  2866. components[i] = i;
  2867. // Punch in the l-value swizzle
  2868. for (int i = 0; i < (int)channels.size(); ++i)
  2869. components[channels[i]] = numTargetComponents + i;
// finish the instruction with these component selectors
  2871. swizzle->reserveOperands(numTargetComponents);
  2872. for (int i = 0; i < numTargetComponents; ++i)
  2873. swizzle->addImmediateOperand(components[i]);
  2874. addInstruction(std::unique_ptr<Instruction>(swizzle));
  2875. return swizzle->getResultId();
  2876. }
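// Worked example of the selectors built above (derived from the loops, no new behavior):
// for a vec4 target and channels {2,3}, i.e. "target.zw = source", the identity selectors
// start as {0,1,2,3} and punching in the l-value swizzle yields {0,1,4,5}: components 0..1
// keep the old target values and components 2..3 come from the two-component source.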
  2877. // Comments in header
  2878. void Builder::promoteScalar(Decoration precision, Id& left, Id& right)
  2879. {
  2880. int direction = getNumComponents(right) - getNumComponents(left);
  2881. if (direction > 0)
  2882. left = smearScalar(precision, left, makeVectorType(getTypeId(left), getNumComponents(right)));
  2883. else if (direction < 0)
  2884. right = smearScalar(precision, right, makeVectorType(getTypeId(right), getNumComponents(left)));
  2885. return;
  2886. }
  2887. // Comments in header
  2888. Id Builder::smearScalar(Decoration precision, Id scalar, Id vectorType)
  2889. {
  2890. assert(getNumComponents(scalar) == 1);
  2891. assert(getTypeId(scalar) == getScalarTypeId(vectorType));
  2892. int numComponents = getNumTypeComponents(vectorType);
  2893. if (numComponents == 1 && !isCooperativeVectorType(vectorType))
  2894. return scalar;
  2895. Instruction* smear = nullptr;
  2896. if (generatingOpCodeForSpecConst) {
  2897. auto members = std::vector<spv::Id>(numComponents, scalar);
// Sometimes, even in spec-constant-op mode, the temporary vector created by
// promoting a scalar might not be a spec constant; whether it is depends on
// the scalar.
  2901. // e.g.:
  2902. // const vec2 spec_const_result = a_spec_const_vec2 + a_front_end_const_scalar;
  2903. // In such cases, the temporary vector created from a_front_end_const_scalar
  2904. // is not a spec constant vector, even though the binary operation node is marked
  2905. // as 'specConstant' and we are in spec-constant-op mode.
  2906. auto result_id = makeCompositeConstant(vectorType, members, isSpecConstant(scalar));
  2907. smear = module.getInstruction(result_id);
  2908. } else {
  2909. bool replicate = (useReplicatedComposites || isCooperativeVectorType(vectorType)) && (numComponents > 0);
  2910. if (replicate) {
  2911. numComponents = 1;
  2912. addCapability(spv::Capability::ReplicatedCompositesEXT);
  2913. addExtension(spv::E_SPV_EXT_replicated_composites);
  2914. }
  2915. Op opcode = replicate ? Op::OpCompositeConstructReplicateEXT : Op::OpCompositeConstruct;
  2916. smear = new Instruction(getUniqueId(), vectorType, opcode);
  2917. smear->reserveOperands(numComponents);
  2918. for (int c = 0; c < numComponents; ++c)
  2919. smear->addIdOperand(scalar);
  2920. addInstruction(std::unique_ptr<Instruction>(smear));
  2921. }
  2922. return setPrecision(smear->getResultId(), precision);
  2923. }
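// For example (hypothetical ids): smearing a float scalar %s across a vec4 emits
//   OpCompositeConstruct %v4float %s %s %s %s
// or, when replicated composites are enabled, the single-operand
//   OpCompositeConstructReplicateEXT %v4float %s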
  2924. // Comments in header
  2925. Id Builder::createBuiltinCall(Id resultType, Id builtins, int entryPoint, const std::vector<Id>& args)
  2926. {
  2927. Instruction* inst = new Instruction(getUniqueId(), resultType, Op::OpExtInst);
  2928. inst->reserveOperands(args.size() + 2);
  2929. inst->addIdOperand(builtins);
  2930. inst->addImmediateOperand(entryPoint);
  2931. for (int arg = 0; arg < (int)args.size(); ++arg)
  2932. inst->addIdOperand(args[arg]);
  2933. addInstruction(std::unique_ptr<Instruction>(inst));
  2934. return inst->getResultId();
  2935. }
  2936. // Accept all parameters needed to create a texture instruction.
  2937. // Create the correct instruction based on the inputs, and make the call.
  2938. Id Builder::createTextureCall(Decoration precision, Id resultType, bool sparse, bool fetch, bool proj, bool gather,
  2939. bool noImplicitLod, const TextureParameters& parameters, ImageOperandsMask signExtensionMask)
  2940. {
  2941. std::vector<Id> texArgs;
  2942. //
  2943. // Set up the fixed arguments
  2944. //
  2945. bool explicitLod = false;
  2946. texArgs.push_back(parameters.sampler);
  2947. texArgs.push_back(parameters.coords);
  2948. if (parameters.Dref != NoResult)
  2949. texArgs.push_back(parameters.Dref);
  2950. if (parameters.component != NoResult)
  2951. texArgs.push_back(parameters.component);
  2952. if (parameters.granularity != NoResult)
  2953. texArgs.push_back(parameters.granularity);
  2954. if (parameters.coarse != NoResult)
  2955. texArgs.push_back(parameters.coarse);
  2956. //
  2957. // Set up the optional arguments
  2958. //
  2959. size_t optArgNum = texArgs.size(); // the position of the mask for the optional arguments, if any.
  2960. ImageOperandsMask mask = ImageOperandsMask::MaskNone; // the mask operand
  2961. if (parameters.bias) {
  2962. mask = (ImageOperandsMask)(mask | ImageOperandsMask::Bias);
  2963. texArgs.push_back(parameters.bias);
  2964. }
  2965. if (parameters.lod) {
  2966. mask = (ImageOperandsMask)(mask | ImageOperandsMask::Lod);
  2967. texArgs.push_back(parameters.lod);
  2968. explicitLod = true;
  2969. } else if (parameters.gradX) {
  2970. mask = (ImageOperandsMask)(mask | ImageOperandsMask::Grad);
  2971. texArgs.push_back(parameters.gradX);
  2972. texArgs.push_back(parameters.gradY);
  2973. explicitLod = true;
  2974. } else if (noImplicitLod && ! fetch && ! gather) {
// have to explicitly use an lod of 0 if implicit lods are not allowed and
// we would otherwise be about to issue an implicit-lod instruction
  2977. mask = (ImageOperandsMask)(mask | ImageOperandsMask::Lod);
  2978. texArgs.push_back(makeFloatConstant(0.0));
  2979. explicitLod = true;
  2980. }
  2981. if (parameters.offset) {
  2982. if (isConstant(parameters.offset))
  2983. mask = (ImageOperandsMask)(mask | ImageOperandsMask::ConstOffset);
  2984. else {
  2985. addCapability(Capability::ImageGatherExtended);
  2986. mask = (ImageOperandsMask)(mask | ImageOperandsMask::Offset);
  2987. }
  2988. texArgs.push_back(parameters.offset);
  2989. }
  2990. if (parameters.offsets) {
  2991. if (!isConstant(parameters.offsets) && sourceLang == spv::SourceLanguage::GLSL) {
  2992. mask = (ImageOperandsMask)(mask | ImageOperandsMask::Offsets);
  2993. } else {
  2994. addCapability(Capability::ImageGatherExtended);
  2995. mask = (ImageOperandsMask)(mask | ImageOperandsMask::ConstOffsets);
  2996. }
  2997. texArgs.push_back(parameters.offsets);
  2998. }
  2999. if (parameters.sample) {
  3000. mask = (ImageOperandsMask)(mask | ImageOperandsMask::Sample);
  3001. texArgs.push_back(parameters.sample);
  3002. }
  3003. if (parameters.lodClamp) {
// the MinLod capability is required if this bit is used
  3005. addCapability(Capability::MinLod);
  3006. mask = (ImageOperandsMask)(mask | ImageOperandsMask::MinLod);
  3007. texArgs.push_back(parameters.lodClamp);
  3008. }
  3009. if (parameters.nonprivate) {
  3010. mask = mask | ImageOperandsMask::NonPrivateTexelKHR;
  3011. }
  3012. if (parameters.volatil) {
  3013. mask = mask | ImageOperandsMask::VolatileTexelKHR;
  3014. }
  3015. if (parameters.nontemporal) {
  3016. mask = mask | ImageOperandsMask::Nontemporal;
  3017. }
  3018. mask = mask | signExtensionMask;
  3019. // insert the operand for the mask, if any bits were set.
  3020. if (mask != ImageOperandsMask::MaskNone)
  3021. texArgs.insert(texArgs.begin() + optArgNum, (Id)mask);
  3022. //
  3023. // Set up the instruction
  3024. //
  3025. Op opCode = Op::OpNop; // All paths below need to set this
  3026. if (fetch) {
  3027. if (sparse)
  3028. opCode = Op::OpImageSparseFetch;
  3029. else
  3030. opCode = Op::OpImageFetch;
  3031. } else if (parameters.granularity && parameters.coarse) {
  3032. opCode = Op::OpImageSampleFootprintNV;
  3033. } else if (gather) {
  3034. if (parameters.Dref)
  3035. if (sparse)
  3036. opCode = Op::OpImageSparseDrefGather;
  3037. else
  3038. opCode = Op::OpImageDrefGather;
  3039. else
  3040. if (sparse)
  3041. opCode = Op::OpImageSparseGather;
  3042. else
  3043. opCode = Op::OpImageGather;
  3044. } else if (explicitLod) {
  3045. if (parameters.Dref) {
  3046. if (proj)
  3047. if (sparse)
  3048. opCode = Op::OpImageSparseSampleProjDrefExplicitLod;
  3049. else
  3050. opCode = Op::OpImageSampleProjDrefExplicitLod;
  3051. else
  3052. if (sparse)
  3053. opCode = Op::OpImageSparseSampleDrefExplicitLod;
  3054. else
  3055. opCode = Op::OpImageSampleDrefExplicitLod;
  3056. } else {
  3057. if (proj)
  3058. if (sparse)
  3059. opCode = Op::OpImageSparseSampleProjExplicitLod;
  3060. else
  3061. opCode = Op::OpImageSampleProjExplicitLod;
  3062. else
  3063. if (sparse)
  3064. opCode = Op::OpImageSparseSampleExplicitLod;
  3065. else
  3066. opCode = Op::OpImageSampleExplicitLod;
  3067. }
  3068. } else {
  3069. if (parameters.Dref) {
  3070. if (proj)
  3071. if (sparse)
  3072. opCode = Op::OpImageSparseSampleProjDrefImplicitLod;
  3073. else
  3074. opCode = Op::OpImageSampleProjDrefImplicitLod;
  3075. else
  3076. if (sparse)
  3077. opCode = Op::OpImageSparseSampleDrefImplicitLod;
  3078. else
  3079. opCode = Op::OpImageSampleDrefImplicitLod;
  3080. } else {
  3081. if (proj)
  3082. if (sparse)
  3083. opCode = Op::OpImageSparseSampleProjImplicitLod;
  3084. else
  3085. opCode = Op::OpImageSampleProjImplicitLod;
  3086. else
  3087. if (sparse)
  3088. opCode = Op::OpImageSparseSampleImplicitLod;
  3089. else
  3090. opCode = Op::OpImageSampleImplicitLod;
  3091. }
  3092. }
  3093. // See if the result type is expecting a smeared result.
  3094. // This happens when a legacy shadow*() call is made, which
  3095. // gets a vec4 back instead of a float.
  3096. Id smearedType = resultType;
  3097. if (! isScalarType(resultType)) {
  3098. switch (opCode) {
  3099. case Op::OpImageSampleDrefImplicitLod:
  3100. case Op::OpImageSampleDrefExplicitLod:
  3101. case Op::OpImageSampleProjDrefImplicitLod:
  3102. case Op::OpImageSampleProjDrefExplicitLod:
  3103. resultType = getScalarTypeId(resultType);
  3104. break;
  3105. default:
  3106. break;
  3107. }
  3108. }
  3109. Id typeId0 = 0;
  3110. Id typeId1 = 0;
  3111. if (sparse) {
  3112. typeId0 = resultType;
  3113. typeId1 = getDerefTypeId(parameters.texelOut);
  3114. resultType = makeStructResultType(typeId0, typeId1);
  3115. }
  3116. // Build the SPIR-V instruction
  3117. Instruction* textureInst = new Instruction(getUniqueId(), resultType, opCode);
  3118. textureInst->reserveOperands(optArgNum + (texArgs.size() - (optArgNum + 1)));
  3119. for (size_t op = 0; op < optArgNum; ++op)
  3120. textureInst->addIdOperand(texArgs[op]);
  3121. if (optArgNum < texArgs.size())
  3122. textureInst->addImmediateOperand(texArgs[optArgNum]);
  3123. for (size_t op = optArgNum + 1; op < texArgs.size(); ++op)
  3124. textureInst->addIdOperand(texArgs[op]);
  3125. setPrecision(textureInst->getResultId(), precision);
  3126. addInstruction(std::unique_ptr<Instruction>(textureInst));
  3127. Id resultId = textureInst->getResultId();
  3128. if (sparse) {
  3129. // set capability
  3130. addCapability(Capability::SparseResidency);
  3131. // Decode the return type that was a special structure
  3132. createStore(createCompositeExtract(resultId, typeId1, 1), parameters.texelOut);
  3133. resultId = createCompositeExtract(resultId, typeId0, 0);
  3134. setPrecision(resultId, precision);
  3135. } else {
  3136. // When a smear is needed, do it, as per what was computed
  3137. // above when resultType was changed to a scalar type.
  3138. if (resultType != smearedType)
  3139. resultId = smearScalar(precision, resultId, smearedType);
  3140. }
  3141. return resultId;
  3142. }
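// For example (derived from the selection logic above): a non-sparse, non-fetch,
// non-gather sample carrying only a bias operand selects Op::OpImageSampleImplicitLod
// with an ImageOperands mask of Bias, while supplying parameters.lod instead selects
// Op::OpImageSampleExplicitLod with a Lod operand.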
  3143. // Comments in header
  3144. Id Builder::createTextureQueryCall(Op opCode, const TextureParameters& parameters, bool isUnsignedResult)
  3145. {
  3146. // Figure out the result type
  3147. Id resultType = 0;
  3148. switch (opCode) {
  3149. case Op::OpImageQuerySize:
  3150. case Op::OpImageQuerySizeLod:
  3151. {
  3152. int numComponents = 0;
  3153. switch (getTypeDimensionality(getImageType(parameters.sampler))) {
  3154. case Dim::Dim1D:
  3155. case Dim::Buffer:
  3156. numComponents = 1;
  3157. break;
  3158. case Dim::Dim2D:
  3159. case Dim::Cube:
  3160. case Dim::Rect:
  3161. case Dim::SubpassData:
  3162. numComponents = 2;
  3163. break;
  3164. case Dim::Dim3D:
  3165. numComponents = 3;
  3166. break;
  3167. default:
  3168. assert(0);
  3169. break;
  3170. }
  3171. if (isArrayedImageType(getImageType(parameters.sampler)))
  3172. ++numComponents;
  3173. Id intType = isUnsignedResult ? makeUintType(32) : makeIntType(32);
  3174. if (numComponents == 1)
  3175. resultType = intType;
  3176. else
  3177. resultType = makeVectorType(intType, numComponents);
  3178. break;
  3179. }
  3180. case Op::OpImageQueryLod:
  3181. resultType = makeVectorType(getScalarTypeId(getTypeId(parameters.coords)), 2);
  3182. break;
  3183. case Op::OpImageQueryLevels:
  3184. case Op::OpImageQuerySamples:
  3185. resultType = isUnsignedResult ? makeUintType(32) : makeIntType(32);
  3186. break;
  3187. default:
  3188. assert(0);
  3189. break;
  3190. }
  3191. Instruction* query = new Instruction(getUniqueId(), resultType, opCode);
  3192. query->addIdOperand(parameters.sampler);
  3193. if (parameters.coords)
  3194. query->addIdOperand(parameters.coords);
  3195. if (parameters.lod)
  3196. query->addIdOperand(parameters.lod);
  3197. addInstruction(std::unique_ptr<Instruction>(query));
  3198. addCapability(Capability::ImageQuery);
  3199. return query->getResultId();
  3200. }
  3201. // External comments in header.
  3202. // Operates recursively to visit the composite's hierarchy.
  3203. Id Builder::createCompositeCompare(Decoration precision, Id value1, Id value2, bool equal)
  3204. {
  3205. Id boolType = makeBoolType();
  3206. Id valueType = getTypeId(value1);
  3207. Id resultId = NoResult;
  3208. int numConstituents = getNumTypeConstituents(valueType);
  3209. // Scalars and Vectors
  3210. if (isScalarType(valueType) || isVectorType(valueType)) {
  3211. assert(valueType == getTypeId(value2));
  3212. // These just need a single comparison, just have
  3213. // to figure out what it is.
  3214. Op op;
  3215. switch (getMostBasicTypeClass(valueType)) {
  3216. case Op::OpTypeFloat:
  3217. op = equal ? Op::OpFOrdEqual : Op::OpFUnordNotEqual;
  3218. break;
  3219. case Op::OpTypeInt:
  3220. default:
  3221. op = equal ? Op::OpIEqual : Op::OpINotEqual;
  3222. break;
  3223. case Op::OpTypeBool:
  3224. op = equal ? Op::OpLogicalEqual : Op::OpLogicalNotEqual;
  3225. precision = NoPrecision;
  3226. break;
  3227. }
  3228. if (isScalarType(valueType)) {
  3229. // scalar
  3230. resultId = createBinOp(op, boolType, value1, value2);
  3231. } else {
  3232. // vector
  3233. resultId = createBinOp(op, makeVectorType(boolType, numConstituents), value1, value2);
  3234. setPrecision(resultId, precision);
  3235. // reduce vector compares...
  3236. resultId = createUnaryOp(equal ? Op::OpAll : Op::OpAny, boolType, resultId);
  3237. }
  3238. return setPrecision(resultId, precision);
  3239. }
  3240. // Only structs, arrays, and matrices should be left.
  3241. // They share in common the reduction operation across their constituents.
  3242. assert(isAggregateType(valueType) || isMatrixType(valueType));
  3243. // Compare each pair of constituents
  3244. for (int constituent = 0; constituent < numConstituents; ++constituent) {
  3245. std::vector<unsigned> indexes(1, constituent);
  3246. Id constituentType1 = getContainedTypeId(getTypeId(value1), constituent);
  3247. Id constituentType2 = getContainedTypeId(getTypeId(value2), constituent);
  3248. Id constituent1 = createCompositeExtract(value1, constituentType1, indexes);
  3249. Id constituent2 = createCompositeExtract(value2, constituentType2, indexes);
  3250. Id subResultId = createCompositeCompare(precision, constituent1, constituent2, equal);
  3251. if (constituent == 0)
  3252. resultId = subResultId;
  3253. else
  3254. resultId = setPrecision(createBinOp(equal ? Op::OpLogicalAnd : Op::OpLogicalOr, boolType, resultId, subResultId),
  3255. precision);
  3256. }
  3257. return resultId;
  3258. }
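// For example: comparing two vec3 values for equality emits OpFOrdEqual producing a
// bvec3 and reduces it with OpAll; inequality uses OpFUnordNotEqual reduced with OpAny.
// Aggregates and matrices recurse constituent by constituent and combine the partial
// results with OpLogicalAnd (equal) or OpLogicalOr (not equal).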
  3259. // OpCompositeConstruct
  3260. Id Builder::createCompositeConstruct(Id typeId, const std::vector<Id>& constituents)
  3261. {
  3262. assert(isAggregateType(typeId) || (getNumTypeConstituents(typeId) > 1 &&
  3263. getNumTypeConstituents(typeId) == constituents.size()) ||
  3264. (isCooperativeVectorType(typeId) && constituents.size() == 1));
  3265. if (generatingOpCodeForSpecConst) {
// Sometimes, even in spec-constant-op mode, the constant composite to be
// constructed may not be a specialization constant.
  3268. // e.g.:
  3269. // const mat2 m2 = mat2(a_spec_const, a_front_end_const, another_front_end_const, third_front_end_const);
  3270. // The first column vector should be a spec constant one, as a_spec_const is a spec constant.
  3271. // The second column vector should NOT be spec constant, as it does not contain any spec constants.
  3272. // To handle such cases, we check the constituents of the constant vector to determine whether this
  3273. // vector should be created as a spec constant.
  3274. return makeCompositeConstant(typeId, constituents,
  3275. std::any_of(constituents.begin(), constituents.end(),
  3276. [&](spv::Id id) { return isSpecConstant(id); }));
  3277. }
  3278. bool replicate = false;
  3279. size_t numConstituents = constituents.size();
  3280. if (useReplicatedComposites || isCooperativeVectorType(typeId)) {
  3281. replicate = numConstituents > 0 &&
  3282. std::equal(constituents.begin() + 1, constituents.end(), constituents.begin());
  3283. }
  3284. if (replicate) {
  3285. numConstituents = 1;
  3286. addCapability(spv::Capability::ReplicatedCompositesEXT);
  3287. addExtension(spv::E_SPV_EXT_replicated_composites);
  3288. }
  3289. Op opcode = replicate ? Op::OpCompositeConstructReplicateEXT : Op::OpCompositeConstruct;
  3290. Instruction* op = new Instruction(getUniqueId(), typeId, opcode);
  3291. op->reserveOperands(constituents.size());
  3292. for (size_t c = 0; c < numConstituents; ++c)
  3293. op->addIdOperand(constituents[c]);
  3294. addInstruction(std::unique_ptr<Instruction>(op));
  3295. return op->getResultId();
  3296. }
  3297. // coopmat conversion
  3298. Id Builder::createCooperativeMatrixConversion(Id typeId, Id source)
  3299. {
  3300. Instruction* op = new Instruction(getUniqueId(), typeId, Op::OpCooperativeMatrixConvertNV);
  3301. op->addIdOperand(source);
  3302. addInstruction(std::unique_ptr<Instruction>(op));
  3303. return op->getResultId();
  3304. }
  3305. // coopmat reduce
  3306. Id Builder::createCooperativeMatrixReduce(Op opcode, Id typeId, Id source, unsigned int mask, Id func)
  3307. {
  3308. Instruction* op = new Instruction(getUniqueId(), typeId, opcode);
  3309. op->addIdOperand(source);
  3310. op->addImmediateOperand(mask);
  3311. op->addIdOperand(func);
  3312. addInstruction(std::unique_ptr<Instruction>(op));
  3313. return op->getResultId();
  3314. }
  3315. // coopmat per-element operation
  3316. Id Builder::createCooperativeMatrixPerElementOp(Id typeId, const std::vector<Id>& operands)
  3317. {
  3318. Instruction* op = new Instruction(getUniqueId(), typeId, spv::Op::OpCooperativeMatrixPerElementOpNV);
  3319. // skip operand[0], which is where the result is stored
  3320. for (uint32_t i = 1; i < operands.size(); ++i) {
  3321. op->addIdOperand(operands[i]);
  3322. }
  3323. addInstruction(std::unique_ptr<Instruction>(op));
  3324. return op->getResultId();
  3325. }
  3326. // Vector or scalar constructor
  3327. Id Builder::createConstructor(Decoration precision, const std::vector<Id>& sources, Id resultTypeId)
  3328. {
  3329. Id result = NoResult;
  3330. unsigned int numTargetComponents = getNumTypeComponents(resultTypeId);
  3331. unsigned int targetComponent = 0;
  3332. // Special case: when calling a vector constructor with a single scalar
  3333. // argument, smear the scalar
  3334. if (sources.size() == 1 && isScalar(sources[0]) && (numTargetComponents > 1 || isCooperativeVectorType(resultTypeId)))
  3335. return smearScalar(precision, sources[0], resultTypeId);
// Special case: a single source vector with the same size as the target; pass it through
  3337. if (sources.size() == 1 && isVector(sources[0]) && numTargetComponents == getNumComponents(sources[0])) {
  3338. assert(resultTypeId == getTypeId(sources[0]));
  3339. return sources[0];
  3340. }
  3341. // accumulate the arguments for OpCompositeConstruct
  3342. std::vector<Id> constituents;
  3343. Id scalarTypeId = getScalarTypeId(resultTypeId);
  3344. // lambda to store the result of visiting an argument component
  3345. const auto latchResult = [&](Id comp) {
  3346. if (numTargetComponents > 1)
  3347. constituents.push_back(comp);
  3348. else
  3349. result = comp;
  3350. ++targetComponent;
  3351. };
  3352. // lambda to visit a vector argument's components
  3353. const auto accumulateVectorConstituents = [&](Id sourceArg) {
  3354. unsigned int sourceSize = getNumComponents(sourceArg);
  3355. unsigned int sourcesToUse = sourceSize;
  3356. if (sourcesToUse + targetComponent > numTargetComponents)
  3357. sourcesToUse = numTargetComponents - targetComponent;
  3358. for (unsigned int s = 0; s < sourcesToUse; ++s) {
  3359. std::vector<unsigned> swiz;
  3360. swiz.push_back(s);
  3361. latchResult(createRvalueSwizzle(precision, scalarTypeId, sourceArg, swiz));
  3362. }
  3363. };
  3364. // lambda to visit a matrix argument's components
  3365. const auto accumulateMatrixConstituents = [&](Id sourceArg) {
  3366. unsigned int sourceSize = getNumColumns(sourceArg) * getNumRows(sourceArg);
  3367. unsigned int sourcesToUse = sourceSize;
  3368. if (sourcesToUse + targetComponent > numTargetComponents)
  3369. sourcesToUse = numTargetComponents - targetComponent;
  3370. unsigned int col = 0;
  3371. unsigned int row = 0;
  3372. for (unsigned int s = 0; s < sourcesToUse; ++s) {
  3373. if (row >= getNumRows(sourceArg)) {
  3374. row = 0;
  3375. col++;
  3376. }
  3377. std::vector<Id> indexes;
  3378. indexes.push_back(col);
  3379. indexes.push_back(row);
  3380. latchResult(createCompositeExtract(sourceArg, scalarTypeId, indexes));
  3381. row++;
  3382. }
  3383. };
  3384. // Go through the source arguments, each one could have either
  3385. // a single or multiple components to contribute.
  3386. for (unsigned int i = 0; i < sources.size(); ++i) {
  3387. if (isScalar(sources[i]) || isPointer(sources[i]))
  3388. latchResult(sources[i]);
  3389. else if (isVector(sources[i]) || isCooperativeVector(sources[i]))
  3390. accumulateVectorConstituents(sources[i]);
  3391. else if (isMatrix(sources[i]))
  3392. accumulateMatrixConstituents(sources[i]);
  3393. else
  3394. assert(0);
  3395. if (targetComponent >= numTargetComponents)
  3396. break;
  3397. }
  3398. // If the result is a vector, make it from the gathered constituents.
  3399. if (constituents.size() > 0) {
  3400. result = createCompositeConstruct(resultTypeId, constituents);
  3401. return setPrecision(result, precision);
  3402. } else {
  3403. // Precision was set when generating this component.
  3404. return result;
  3405. }
  3406. }
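
// For example, a GLSL-style construction such as vec4(v2, a, b), where v2 is a
// vec2 and a and b are scalars, is flattened by the loop above into four scalar
// constituents and emitted as a single OpCompositeConstruct.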

// Comments in header
Id Builder::createMatrixConstructor(Decoration precision, const std::vector<Id>& sources, Id resultTypeId)
{
    Id componentTypeId = getScalarTypeId(resultTypeId);
    unsigned int numCols = getTypeNumColumns(resultTypeId);
    unsigned int numRows = getTypeNumRows(resultTypeId);

    Instruction* instr = module.getInstruction(componentTypeId);
    const unsigned bitCount = instr->getImmediateOperand(0);

    // Optimize matrix constructed from a bigger matrix
    if (isMatrix(sources[0]) && getNumColumns(sources[0]) >= numCols && getNumRows(sources[0]) >= numRows) {
        // To truncate the matrix to a smaller number of rows/columns, we need to:
        // 1. For each column, extract the column and truncate it to the required size using shuffle
        // 2. Assemble the resulting matrix from all columns
        Id matrix = sources[0];
        Id columnTypeId = getContainedTypeId(resultTypeId);
        Id sourceColumnTypeId = getContainedTypeId(getTypeId(matrix));

        std::vector<unsigned> channels;
        for (unsigned int row = 0; row < numRows; ++row)
            channels.push_back(row);

        std::vector<Id> matrixColumns;
        for (unsigned int col = 0; col < numCols; ++col) {
            std::vector<unsigned> indexes;
            indexes.push_back(col);
            Id colv = createCompositeExtract(matrix, sourceColumnTypeId, indexes);
            setPrecision(colv, precision);

            if (numRows != getNumRows(matrix)) {
                matrixColumns.push_back(createRvalueSwizzle(precision, columnTypeId, colv, channels));
            } else {
                matrixColumns.push_back(colv);
            }
        }

        return setPrecision(createCompositeConstruct(resultTypeId, matrixColumns), precision);
    }

    // Detect a matrix being constructed from a repeated vector of the correct size.
    // Create the composite directly from it.
    if (sources.size() == numCols && isVector(sources[0]) && getNumComponents(sources[0]) == numRows &&
        std::equal(sources.begin() + 1, sources.end(), sources.begin())) {
        return setPrecision(createCompositeConstruct(resultTypeId, sources), precision);
    }

    // Otherwise, will use a two step process
    // 1. make a compile-time 2D array of values
    // 2. construct a matrix from that array

    // Step 1.

    // initialize the array to the identity matrix
    Id ids[maxMatrixSize][maxMatrixSize];
    Id one  = (bitCount == 64 ? makeDoubleConstant(1.0) : makeFloatConstant(1.0));
    Id zero = (bitCount == 64 ? makeDoubleConstant(0.0) : makeFloatConstant(0.0));
    for (int col = 0; col < 4; ++col) {
        for (int row = 0; row < 4; ++row) {
            if (col == row)
                ids[col][row] = one;
            else
                ids[col][row] = zero;
        }
    }

    // modify components as dictated by the arguments
    if (sources.size() == 1 && isScalar(sources[0])) {
        // a single scalar; resets the diagonals
        for (int col = 0; col < 4; ++col)
            ids[col][col] = sources[0];
    } else if (isMatrix(sources[0])) {
        // constructing from another matrix; copy over the parts that exist in both the argument and constructee
        Id matrix = sources[0];
        unsigned int minCols = std::min(numCols, getNumColumns(matrix));
        unsigned int minRows = std::min(numRows, getNumRows(matrix));
        for (unsigned int col = 0; col < minCols; ++col) {
            std::vector<unsigned> indexes;
            indexes.push_back(col);
            for (unsigned int row = 0; row < minRows; ++row) {
                indexes.push_back(row);
                ids[col][row] = createCompositeExtract(matrix, componentTypeId, indexes);
                indexes.pop_back();
                setPrecision(ids[col][row], precision);
            }
        }
    } else {
        // fill in the matrix in column-major order with whatever argument components are available
        unsigned int row = 0;
        unsigned int col = 0;

        for (unsigned int arg = 0; arg < sources.size() && col < numCols; ++arg) {
            Id argComp = sources[arg];
            for (unsigned int comp = 0; comp < getNumComponents(sources[arg]); ++comp) {
                if (getNumComponents(sources[arg]) > 1) {
                    argComp = createCompositeExtract(sources[arg], componentTypeId, comp);
                    setPrecision(argComp, precision);
                }
                ids[col][row++] = argComp;
                if (row == numRows) {
                    row = 0;
                    col++;
                }
                if (col == numCols) {
                    // If more components are provided than fit the matrix, discard the rest.
                    break;
                }
            }
        }
    }

    // Step 2: Construct a matrix from that array.
    // First make the column vectors, then make the matrix.

    // make the column vectors
    Id columnTypeId = getContainedTypeId(resultTypeId);
    std::vector<Id> matrixColumns;
    for (unsigned int col = 0; col < numCols; ++col) {
        std::vector<Id> vectorComponents;
        for (unsigned int row = 0; row < numRows; ++row)
            vectorComponents.push_back(ids[col][row]);
        Id column = createCompositeConstruct(columnTypeId, vectorComponents);
        setPrecision(column, precision);
        matrixColumns.push_back(column);
    }

    // make the matrix
    return setPrecision(createCompositeConstruct(resultTypeId, matrixColumns), precision);
}
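
// For example, mat3(m) with m a mat4 takes the truncation path above, mat4(s)
// with a scalar s takes the identity path and resets only the diagonal, and
// mat2(a, b, c, d) fills the columns in column-major order from the scalars.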

// Comments in header
Builder::If::If(Id cond, SelectionControlMask ctrl, Builder& gb) :
    builder(gb),
    condition(cond),
    control(ctrl),
    elseBlock(nullptr)
{
    function = &builder.getBuildPoint()->getParent();

    // make the blocks, but only put the then-block into the function,
    // the else-block and merge-block will be added later, in order, after
    // earlier code is emitted
    thenBlock = new Block(builder.getUniqueId(), *function);
    mergeBlock = new Block(builder.getUniqueId(), *function);

    // Save the current block, so that we can add in the flow control split when
    // makeEndIf is called.
    headerBlock = builder.getBuildPoint();
    builder.createSelectionMerge(mergeBlock, control);

    function->addBlock(thenBlock);
    builder.setBuildPoint(thenBlock);
}

// Comments in header
void Builder::If::makeBeginElse()
{
    // Close out the "then" by having it jump to the mergeBlock
    builder.createBranch(true, mergeBlock);

    // Make the first else block and add it to the function
    elseBlock = new Block(builder.getUniqueId(), *function);
    function->addBlock(elseBlock);

    // Start building the else block
    builder.setBuildPoint(elseBlock);
}

// Comments in header
void Builder::If::makeEndIf()
{
    // jump to the merge block
    builder.createBranch(true, mergeBlock);

    // Go back to the headerBlock and make the flow control split
    builder.setBuildPoint(headerBlock);
    if (elseBlock)
        builder.createConditionalBranch(condition, thenBlock, elseBlock);
    else
        builder.createConditionalBranch(condition, thenBlock, mergeBlock);

    // add the merge block to the function
    function->addBlock(mergeBlock);
    builder.setBuildPoint(mergeBlock);
}
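
// Typical use of the If helper by a front end (illustrative sketch only; the
// 'cond' and 'builder' names are placeholders):
//
//     spv::Builder::If ifBuilder(cond, spv::SelectionControlMask::MaskNone, builder);
//     ... emit then-side code at the current build point ...
//     ifBuilder.makeBeginElse();   // optional
//     ... emit else-side code ...
//     ifBuilder.makeEndIf();       // emits the conditional branch and moves to the merge block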

// Comments in header
void Builder::makeSwitch(Id selector, SelectionControlMask control, int numSegments, const std::vector<int>& caseValues,
                         const std::vector<int>& valueIndexToSegment, int defaultSegment,
                         std::vector<Block*>& segmentBlocks)
{
    Function& function = buildPoint->getParent();

    // make all the blocks
    for (int s = 0; s < numSegments; ++s)
        segmentBlocks.push_back(new Block(getUniqueId(), function));

    Block* mergeBlock = new Block(getUniqueId(), function);

    // make and insert the switch's selection-merge instruction
    createSelectionMerge(mergeBlock, control);

    // make the switch instruction
    Instruction* switchInst = new Instruction(NoResult, NoType, Op::OpSwitch);
    switchInst->reserveOperands((caseValues.size() * 2) + 2);
    switchInst->addIdOperand(selector);
    auto defaultOrMerge = (defaultSegment >= 0) ? segmentBlocks[defaultSegment] : mergeBlock;
    switchInst->addIdOperand(defaultOrMerge->getId());
    defaultOrMerge->addPredecessor(buildPoint);
    for (int i = 0; i < (int)caseValues.size(); ++i) {
        switchInst->addImmediateOperand(caseValues[i]);
        switchInst->addIdOperand(segmentBlocks[valueIndexToSegment[i]]->getId());
        segmentBlocks[valueIndexToSegment[i]]->addPredecessor(buildPoint);
    }
    addInstruction(std::unique_ptr<Instruction>(switchInst));

    // push the merge block
    switchMerges.push(mergeBlock);
}

// Comments in header
void Builder::addSwitchBreak(bool implicit)
{
    // branch to the top of the merge block stack
    createBranch(implicit, switchMerges.top());
    createAndSetNoPredecessorBlock("post-switch-break");
}

// Comments in header
void Builder::nextSwitchSegment(std::vector<Block*>& segmentBlock, int nextSegment)
{
    int lastSegment = nextSegment - 1;
    if (lastSegment >= 0) {
        // Close out previous segment by jumping, if necessary, to next segment
        if (! buildPoint->isTerminated())
            createBranch(true, segmentBlock[nextSegment]);
    }
    Block* block = segmentBlock[nextSegment];
    block->getParent().addBlock(block);
    setBuildPoint(block);
}

// Comments in header
void Builder::endSwitch(std::vector<Block*>& /*segmentBlock*/)
{
    // Close out the final segment by branching, if necessary, to the merge block
    if (! buildPoint->isTerminated())
        addSwitchBreak(true);

    switchMerges.top()->getParent().addBlock(switchMerges.top());
    setBuildPoint(switchMerges.top());

    switchMerges.pop();
}
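
// Typical calling sequence for the switch helpers above (illustrative sketch;
// 'selector', 'control', 'caseValues', 'valueIndexToSegment', 'defaultSegment',
// and 'numSegments' are placeholders supplied by the front end):
//
//     std::vector<spv::Block*> segments;
//     builder.makeSwitch(selector, control, numSegments, caseValues,
//                        valueIndexToSegment, defaultSegment, segments);
//     for (int s = 0; s < numSegments; ++s) {
//         builder.nextSwitchSegment(segments, s);
//         ... emit the segment's code; addSwitchBreak(false) implements an explicit 'break' ...
//     }
//     builder.endSwitch(segments);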

// Make a new block in the current function and add it to that function.
Block& Builder::makeNewBlock()
{
    Function& function = buildPoint->getParent();
    auto block = new Block(getUniqueId(), function);
    function.addBlock(block);
    return *block;
}

Builder::LoopBlocks& Builder::makeNewLoop()
{
    // This verbosity is needed to simultaneously get the same behavior
    // everywhere (id's in the same order), have a syntax that works
    // across lots of versions of C++, have no warnings from pedantic
    // compilation modes, and leave the rest of the code alone.
    Block& head = makeNewBlock();
    Block& body = makeNewBlock();
    Block& merge = makeNewBlock();
    Block& continue_target = makeNewBlock();
    LoopBlocks blocks(head, body, merge, continue_target);
    loops.push(blocks);
    return loops.top();
}

void Builder::createLoopContinue()
{
    createBranch(false, &loops.top().continue_target);
    // Set up a block for dead code.
    createAndSetNoPredecessorBlock("post-loop-continue");
}

void Builder::createLoopExit()
{
    createBranch(false, &loops.top().merge);
    // Set up a block for dead code.
    createAndSetNoPredecessorBlock("post-loop-break");
}

// Pop the innermost loop off the loop stack.
void Builder::closeLoop()
{
    loops.pop();
}
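
// Typical wiring of the loop helpers above (illustrative sketch; the exact block
// layout is chosen by the front end, and 'cond', 'control', and 'test' are
// placeholders). Note that OpLoopMerge must immediately precede the header's
// terminating branch, so the condition is usually evaluated in its own block:
//
//     Builder::LoopBlocks& blocks = builder.makeNewLoop();
//     builder.createBranch(true, &blocks.head);
//     builder.setBuildPoint(&blocks.head);
//     builder.createLoopMerge(&blocks.merge, &blocks.continue_target, control, {});
//     builder.createBranch(true, &test);
//     ... in 'test': evaluate cond, then createConditionalBranch(cond, &blocks.body, &blocks.merge) ...
//     builder.setBuildPoint(&blocks.body);
//     ... emit the body; createLoopContinue()/createLoopExit() implement 'continue'/'break' ...
//     builder.createBranch(true, &blocks.continue_target);
//     builder.setBuildPoint(&blocks.continue_target);
//     builder.createBranch(true, &blocks.head);
//     builder.setBuildPoint(&blocks.merge);
//     builder.closeLoop();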

// Reset the accumulating access chain back to empty.
void Builder::clearAccessChain()
{
    accessChain.base = NoResult;
    accessChain.indexChain.clear();
    accessChain.instr = NoResult;
    accessChain.swizzle.clear();
    accessChain.component = NoResult;
    accessChain.preSwizzleBaseType = NoType;
    accessChain.isRValue = false;
    accessChain.coherentFlags.clear();
    accessChain.alignment = 0;
}

// Comments in header
void Builder::accessChainPushSwizzle(std::vector<unsigned>& swizzle, Id preSwizzleBaseType,
    AccessChain::CoherentFlags coherentFlags, unsigned int alignment)
{
    accessChain.coherentFlags |= coherentFlags;
    accessChain.alignment |= alignment;

    // swizzles can be stacked in GLSL, but simplified to a single
    // one here; the base type doesn't change
    if (accessChain.preSwizzleBaseType == NoType)
        accessChain.preSwizzleBaseType = preSwizzleBaseType;

    // if needed, propagate the swizzle for the current access chain
    if (accessChain.swizzle.size() > 0) {
        std::vector<unsigned> oldSwizzle = accessChain.swizzle;
        accessChain.swizzle.resize(0);
        for (unsigned int i = 0; i < swizzle.size(); ++i) {
            assert(swizzle[i] < oldSwizzle.size());
            accessChain.swizzle.push_back(oldSwizzle[swizzle[i]]);
        }
    } else
        accessChain.swizzle = swizzle;

    // determine if we need to track this swizzle anymore
    simplifyAccessChainSwizzle();
}

// Comments in header
void Builder::accessChainStore(Id rvalue, Decoration nonUniform, spv::MemoryAccessMask memoryAccess, spv::Scope scope, unsigned int alignment)
{
    assert(accessChain.isRValue == false);

    transferAccessChainSwizzle(true);

    // MeshShadingEXT outputs don't support loads, so split swizzled stores
    bool isMeshOutput = getStorageClass(accessChain.base) == StorageClass::Output &&
                        capabilities.find(spv::Capability::MeshShadingEXT) != capabilities.end();

    // If a swizzle exists and is not full and is not dynamic, then the swizzle will be broken into individual stores.
    if (accessChain.swizzle.size() > 0 &&
        ((getNumTypeComponents(getResultingAccessChainType()) != accessChain.swizzle.size() && accessChain.component == NoResult) || isMeshOutput)) {
        for (unsigned int i = 0; i < accessChain.swizzle.size(); ++i) {
            accessChain.indexChain.push_back(makeUintConstant(accessChain.swizzle[i]));
            accessChain.instr = NoResult;

            Id base = collapseAccessChain();
            addDecoration(base, nonUniform);

            accessChain.indexChain.pop_back();
            accessChain.instr = NoResult;

            // dynamic component should be gone
            assert(accessChain.component == NoResult);

            Id source = createCompositeExtract(rvalue, getContainedTypeId(getTypeId(rvalue)), i);

            // take LSB of alignment
            alignment = alignment & ~(alignment & (alignment-1));
            if (getStorageClass(base) == StorageClass::PhysicalStorageBufferEXT) {
                memoryAccess = (spv::MemoryAccessMask)(memoryAccess | spv::MemoryAccessMask::Aligned);
            }

            createStore(source, base, memoryAccess, scope, alignment);
        }
    }
    else {
        Id base = collapseAccessChain();
        addDecoration(base, nonUniform);

        Id source = rvalue;

        // dynamic component should be gone
        assert(accessChain.component == NoResult);

        // If a swizzle still exists, it may be out-of-order; load the target vector and
        // extract/insert elements to apply the writeMask and/or swizzle.
        if (accessChain.swizzle.size() > 0) {
            Id tempBaseId = createLoad(base, spv::NoPrecision);
            source = createLvalueSwizzle(getTypeId(tempBaseId), tempBaseId, source, accessChain.swizzle);
        }

        // take LSB of alignment
        alignment = alignment & ~(alignment & (alignment-1));
        if (getStorageClass(base) == StorageClass::PhysicalStorageBufferEXT) {
            memoryAccess = (spv::MemoryAccessMask)(memoryAccess | spv::MemoryAccessMask::Aligned);
        }

        createStore(source, base, memoryAccess, scope, alignment);
    }
}

// Comments in header
Id Builder::accessChainLoad(Decoration precision, Decoration l_nonUniform,
    Decoration r_nonUniform, Id resultType, spv::MemoryAccessMask memoryAccess,
    spv::Scope scope, unsigned int alignment)
{
    Id id;

    if (accessChain.isRValue) {
        // transfer access chain, but try to stay in registers
        transferAccessChainSwizzle(false);
        if (accessChain.indexChain.size() > 0) {
            Id swizzleBase = accessChain.preSwizzleBaseType != NoType ? accessChain.preSwizzleBaseType : resultType;

            // if all the accesses are constants, we can use OpCompositeExtract
            std::vector<unsigned> indexes;
            bool constant = true;
            for (int i = 0; i < (int)accessChain.indexChain.size(); ++i) {
                if (isConstantScalar(accessChain.indexChain[i]))
                    indexes.push_back(getConstantScalar(accessChain.indexChain[i]));
                else {
                    constant = false;
                    break;
                }
            }

            if (constant) {
                id = createCompositeExtract(accessChain.base, swizzleBase, indexes);
                setPrecision(id, precision);
            } else if (isCooperativeVector(accessChain.base)) {
                assert(accessChain.indexChain.size() == 1);
                id = createVectorExtractDynamic(accessChain.base, resultType, accessChain.indexChain[0]);
            } else {
                Id lValue = NoResult;
                if (spvVersion >= Spv_1_4 && isValidInitializer(accessChain.base)) {
                    // make a new function variable for this r-value, using an initializer,
                    // and mark it as NonWritable so that downstream it can be detected as a lookup
                    // table
                    lValue = createVariable(NoPrecision, StorageClass::Function, getTypeId(accessChain.base),
                        "indexable", accessChain.base);
                    addDecoration(lValue, Decoration::NonWritable);
                } else {
                    lValue = createVariable(NoPrecision, StorageClass::Function, getTypeId(accessChain.base),
                        "indexable");
                    // store into it
                    createStore(accessChain.base, lValue);
                }
                // move base to the new variable
                accessChain.base = lValue;
                accessChain.isRValue = false;

                // load through the access chain
                id = createLoad(collapseAccessChain(), precision);
            }
        } else
            id = accessChain.base;  // no precision, it was set when this was defined
    } else {
        transferAccessChainSwizzle(true);

        // take LSB of alignment
        alignment = alignment & ~(alignment & (alignment-1));
        if (getStorageClass(accessChain.base) == StorageClass::PhysicalStorageBufferEXT) {
            memoryAccess = (spv::MemoryAccessMask)(memoryAccess | spv::MemoryAccessMask::Aligned);
        }

        // load through the access chain
        id = collapseAccessChain();

        // Apply nonuniform both to the access chain and the loaded value.
        // Buffer accesses need the access chain decorated, and this is where
        // loaded image types get decorated. TODO: This should maybe move to
        // createImageTextureFunctionCall.
        addDecoration(id, l_nonUniform);
        id = createLoad(id, precision, memoryAccess, scope, alignment);
        addDecoration(id, r_nonUniform);
    }

    // Done, unless there are swizzles to do
    if (accessChain.swizzle.size() == 0 && accessChain.component == NoResult)
        return id;

    // Do remaining swizzling

    // Do the basic swizzle
    if (accessChain.swizzle.size() > 0) {
        Id swizzledType = getScalarTypeId(getTypeId(id));
        if (accessChain.swizzle.size() > 1)
            swizzledType = makeVectorType(swizzledType, (int)accessChain.swizzle.size());
        id = createRvalueSwizzle(precision, swizzledType, id, accessChain.swizzle);
    }

    // Do the dynamic component
    if (accessChain.component != NoResult)
        id = setPrecision(createVectorExtractDynamic(id, resultType, accessChain.component), precision);

    addDecoration(id, r_nonUniform);
    return id;
}
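
// Typical access-chain workflow driven by a front end (illustrative sketch;
// setAccessChainLValue() and accessChainPush() are the chain setters assumed to
// be declared in SpvBuilder.h, the remaining names are placeholders, and the
// trailing memory-access arguments are assumed to be defaulted in the header):
//
//     builder.clearAccessChain();
//     builder.setAccessChainLValue(variableId);                 // base pointer
//     builder.accessChainPush(indexId, coherentFlags, 0);       // array/struct index
//     builder.accessChainPushSwizzle(swizzle, preSwizzleTypeId, coherentFlags, 0);
//     Id value = builder.accessChainLoad(precision, nonUniform, nonUniform, resultTypeId);
//     ... or, for the left-hand side of an assignment ...
//     builder.accessChainStore(rvalueId, nonUniform);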

Id Builder::accessChainGetLValue()
{
    assert(accessChain.isRValue == false);

    transferAccessChainSwizzle(true);
    Id lvalue = collapseAccessChain();

    // If a swizzle still existed here, it would be out-of-order or not full, and we
    // would have to load the target vector and extract/insert elements to apply the
    // writeMask and/or swizzle. That does not go with getting a direct l-value
    // pointer, so neither a swizzle nor a dynamic component may be pending.
    assert(accessChain.swizzle.size() == 0);
    assert(accessChain.component == NoResult);

    return lvalue;
}

// comment in header
Id Builder::accessChainGetInferredType()
{
    // anything to operate on?
    if (accessChain.base == NoResult)
        return NoType;
    Id type = getTypeId(accessChain.base);

    // do initial dereference
    if (! accessChain.isRValue)
        type = getContainedTypeId(type);

    // dereference each index
    for (auto it = accessChain.indexChain.cbegin(); it != accessChain.indexChain.cend(); ++it) {
        if (isStructType(type))
            type = getContainedTypeId(type, getConstantScalar(*it));
        else
            type = getContainedTypeId(type);
    }

    // dereference swizzle
    if (accessChain.swizzle.size() == 1)
        type = getContainedTypeId(type);
    else if (accessChain.swizzle.size() > 1)
        type = makeVectorType(getContainedTypeId(type), (int)accessChain.swizzle.size());

    // dereference component selection
    if (accessChain.component)
        type = getContainedTypeId(type);

    return type;
}

void Builder::dump(std::vector<unsigned int>& out) const
{
    // Header, before first instructions:
    out.push_back(MagicNumber);
    out.push_back(spvVersion);
    out.push_back(builderNumber);
    out.push_back(uniqueId + 1);
    out.push_back(0);

    // Capabilities
    for (auto it = capabilities.cbegin(); it != capabilities.cend(); ++it) {
        Instruction capInst(0, 0, Op::OpCapability);
        capInst.addImmediateOperand(*it);
        capInst.dump(out);
    }

    for (auto it = extensions.cbegin(); it != extensions.cend(); ++it) {
        Instruction extInst(0, 0, Op::OpExtension);
        extInst.addStringOperand(it->c_str());
        extInst.dump(out);
    }

    dumpInstructions(out, imports);
    Instruction memInst(0, 0, Op::OpMemoryModel);
    memInst.addImmediateOperand(addressModel);
    memInst.addImmediateOperand(memoryModel);
    memInst.dump(out);

    // Instructions saved up while building:
    dumpInstructions(out, entryPoints);
    dumpInstructions(out, executionModes);

    // Debug instructions
    dumpInstructions(out, strings);
    dumpSourceInstructions(out);
    for (int e = 0; e < (int)sourceExtensions.size(); ++e) {
        Instruction sourceExtInst(0, 0, Op::OpSourceExtension);
        sourceExtInst.addStringOperand(sourceExtensions[e]);
        sourceExtInst.dump(out);
    }
    dumpInstructions(out, names);
    dumpModuleProcesses(out);

    // Annotation instructions
    dumpInstructions(out, decorations);

    dumpInstructions(out, constantsTypesGlobals);
    dumpInstructions(out, externals);

    // The functions
    module.dump(out);
}
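
// The emission order above follows the logical layout of a SPIR-V module:
// capabilities, extensions, extended-instruction imports, the memory model,
// entry points and execution modes, debug information (strings, sources, names),
// annotations (decorations), types/constants/global variables, and finally the
// function bodies.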

//
// Protected methods.
//

// Turn the access chain described in 'accessChain' into the instruction(s)
// computing its address. This *cannot* include complex swizzles, which must
// be handled after this is called.
//
// Can generate code.
Id Builder::collapseAccessChain()
{
    assert(accessChain.isRValue == false);

    // did we already emit an access chain for this?
    if (accessChain.instr != NoResult)
        return accessChain.instr;

    // If we have a dynamic component, we can still transfer
    // that into a final operand to the access chain. We need to remap the
    // dynamic component through the swizzle to get a new dynamic component to
    // update.
    //
    // This was not done in transferAccessChainSwizzle() because it might
    // generate code.
    remapDynamicSwizzle();
    if (accessChain.component != NoResult) {
        // transfer the dynamic component to the access chain
        accessChain.indexChain.push_back(accessChain.component);
        accessChain.component = NoResult;
    }

    // note that non-trivial swizzling is left pending

    // do we have an access chain?
    if (accessChain.indexChain.size() == 0)
        return accessChain.base;

    // emit the access chain
    StorageClass storageClass = (StorageClass)module.getStorageClass(getTypeId(accessChain.base));
    accessChain.instr = createAccessChain(storageClass, accessChain.base, accessChain.indexChain);

    return accessChain.instr;
}

// For a dynamic component selection of a swizzle.
//
// Turn the swizzle and dynamic component into just a dynamic component.
//
// Generates code.
void Builder::remapDynamicSwizzle()
{
    // do we have a swizzle to remap a dynamic component through?
    if (accessChain.component != NoResult && accessChain.swizzle.size() > 1) {
        // build a vector of the swizzle for the component to map into
        std::vector<Id> components;
        for (int c = 0; c < (int)accessChain.swizzle.size(); ++c)
            components.push_back(makeUintConstant(accessChain.swizzle[c]));
        Id mapType = makeVectorType(makeUintType(32), (int)accessChain.swizzle.size());
        Id map = makeCompositeConstant(mapType, components);

        // use it
        accessChain.component = createVectorExtractDynamic(map, makeUintType(32), accessChain.component);
        accessChain.swizzle.clear();
    }
}

// Clear out the swizzle if it is redundant, that is, if it reselects the same
// components that would be present without the swizzle.
void Builder::simplifyAccessChainSwizzle()
{
    // If the swizzle has fewer components than the vector, it is subsetting, and must stay
    // to preserve that fact.
    if (getNumTypeComponents(accessChain.preSwizzleBaseType) > accessChain.swizzle.size())
        return;

    // if components are out of order, it is a swizzle
    for (unsigned int i = 0; i < accessChain.swizzle.size(); ++i) {
        if (i != accessChain.swizzle[i])
            return;
    }

    // otherwise, there is no need to track this swizzle
    accessChain.swizzle.clear();
    if (accessChain.component == NoResult)
        accessChain.preSwizzleBaseType = NoType;
}
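
// For example, .xyz applied to a vec3 reselects every component in order and is
// dropped here, while .xy applied to a vec4 is subsetting and must be kept.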

// To the extent any swizzling can become part of the chain
// of accesses instead of a post operation, make it so.
// If 'dynamic' is true, include transferring the dynamic component,
// otherwise, leave it pending.
//
// Does not generate code; it just updates the access chain.
void Builder::transferAccessChainSwizzle(bool dynamic)
{
    // nonexistent?
    if (accessChain.swizzle.size() == 0 && accessChain.component == NoResult)
        return;

    // too complex?
    // (this requires either a swizzle, or generating code for a dynamic component)
    if (accessChain.swizzle.size() > 1)
        return;

    // single component, either in the swizzle and/or dynamic component
    if (accessChain.swizzle.size() == 1) {
        assert(accessChain.component == NoResult);
        // handle static component selection
        accessChain.indexChain.push_back(makeUintConstant(accessChain.swizzle.front()));
        accessChain.swizzle.clear();
        accessChain.preSwizzleBaseType = NoType;
    } else if (dynamic && accessChain.component != NoResult) {
        assert(accessChain.swizzle.size() == 0);
        // handle dynamic component
        accessChain.indexChain.push_back(accessChain.component);
        accessChain.preSwizzleBaseType = NoType;
        accessChain.component = NoResult;
    }
}

// Utility method for creating a new block and setting the insert point to
// be in it. This is useful for flow-control operations that need a "dummy"
// block following them (e.g., instructions after a discard, etc.).
void Builder::createAndSetNoPredecessorBlock(const char* /*name*/)
{
    Block* block = new Block(getUniqueId(), buildPoint->getParent());
    block->setUnreachable();
    buildPoint->getParent().addBlock(block);
    setBuildPoint(block);

    // if (name)
    //     addName(block->getId(), name);
}

// Comments in header
void Builder::createBranch(bool implicit, Block* block)
{
    Instruction* branch = new Instruction(Op::OpBranch);
    branch->addIdOperand(block->getId());
    if (implicit) {
        addInstructionNoDebugInfo(std::unique_ptr<Instruction>(branch));
    }
    else {
        addInstruction(std::unique_ptr<Instruction>(branch));
    }
    block->addPredecessor(buildPoint);
}

void Builder::createSelectionMerge(Block* mergeBlock, SelectionControlMask control)
{
    Instruction* merge = new Instruction(Op::OpSelectionMerge);
    merge->reserveOperands(2);
    merge->addIdOperand(mergeBlock->getId());
    merge->addImmediateOperand(control);
    addInstruction(std::unique_ptr<Instruction>(merge));
}

void Builder::createLoopMerge(Block* mergeBlock, Block* continueBlock, LoopControlMask control,
                              const std::vector<unsigned int>& operands)
{
    Instruction* merge = new Instruction(Op::OpLoopMerge);
    merge->reserveOperands(operands.size() + 3);
    merge->addIdOperand(mergeBlock->getId());
    merge->addIdOperand(continueBlock->getId());
    merge->addImmediateOperand(control);
    for (int op = 0; op < (int)operands.size(); ++op)
        merge->addImmediateOperand(operands[op]);
    addInstruction(std::unique_ptr<Instruction>(merge));
}

void Builder::createConditionalBranch(Id condition, Block* thenBlock, Block* elseBlock)
{
    Instruction* branch = new Instruction(Op::OpBranchConditional);
    branch->reserveOperands(3);
    branch->addIdOperand(condition);
    branch->addIdOperand(thenBlock->getId());
    branch->addIdOperand(elseBlock->getId());

    // A conditional branch is always attached to a condition expression
    addInstructionNoDebugInfo(std::unique_ptr<Instruction>(branch));

    thenBlock->addPredecessor(buildPoint);
    elseBlock->addPredecessor(buildPoint);
}

// OpSource
// [OpSourceContinued]
// ...
void Builder::dumpSourceInstructions(const spv::Id fileId, const std::string& text,
                                     std::vector<unsigned int>& out) const
{
    const int maxWordCount = 0xFFFF;
    const int opSourceWordCount = 4;
    const int nonNullBytesPerInstruction = 4 * (maxWordCount - opSourceWordCount) - 1;

    if (sourceLang != SourceLanguage::Unknown) {
        // OpSource Language Version File Source
        Instruction sourceInst(NoResult, NoType, Op::OpSource);
        sourceInst.reserveOperands(3);
        sourceInst.addImmediateOperand(sourceLang);
        sourceInst.addImmediateOperand(sourceVersion);
        // File operand
        if (fileId != NoResult) {
            sourceInst.addIdOperand(fileId);
            // Source operand
            if (text.size() > 0) {
                int nextByte = 0;
                std::string subString;
                while ((int)text.size() - nextByte > 0) {
                    subString = text.substr(nextByte, nonNullBytesPerInstruction);
                    if (nextByte == 0) {
                        // OpSource
                        sourceInst.addStringOperand(subString.c_str());
                        sourceInst.dump(out);
                    } else {
                        // OpSourceContinued
                        Instruction sourceContinuedInst(Op::OpSourceContinued);
                        sourceContinuedInst.addStringOperand(subString.c_str());
                        sourceContinuedInst.dump(out);
                    }
                    nextByte += nonNullBytesPerInstruction;
                }
            } else
                sourceInst.dump(out);
        } else
            sourceInst.dump(out);
    }
}
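
// With a limit of 0xFFFF words per instruction and 4 words of OpSource overhead,
// each chunk above carries at most (0xFFFF - 4) * 4 - 1 = 262,123 bytes of source
// text; the remaining byte is reserved for the string's null terminator.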

// Dump an OpSource[Continued] sequence for the source and every include file
void Builder::dumpSourceInstructions(std::vector<unsigned int>& out) const
{
    if (emitNonSemanticShaderDebugInfo) return;
    dumpSourceInstructions(mainFileId, sourceText, out);
    for (auto iItr = includeFiles.begin(); iItr != includeFiles.end(); ++iItr)
        dumpSourceInstructions(iItr->first, *iItr->second, out);
}

template <class Range> void Builder::dumpInstructions(std::vector<unsigned int>& out, const Range& instructions) const
{
    for (const auto& inst : instructions) {
        inst->dump(out);
    }
}

void Builder::dumpModuleProcesses(std::vector<unsigned int>& out) const
{
    for (int i = 0; i < (int)moduleProcesses.size(); ++i) {
        Instruction moduleProcessed(Op::OpModuleProcessed);
        moduleProcessed.addStringOperand(moduleProcesses[i]);
        moduleProcessed.dump(out);
    }
}

bool Builder::DecorationInstructionLessThan::operator()(const std::unique_ptr<Instruction>& lhs,
                                                        const std::unique_ptr<Instruction>& rhs) const
{
    // Order by the id to which the decoration applies first. This is more intuitive.
    assert(lhs->isIdOperand(0) && rhs->isIdOperand(0));
    if (lhs->getIdOperand(0) != rhs->getIdOperand(0)) {
        return lhs->getIdOperand(0) < rhs->getIdOperand(0);
    }

    if (lhs->getOpCode() != rhs->getOpCode())
        return lhs->getOpCode() < rhs->getOpCode();

    // Now compare the operands.
    int minSize = std::min(lhs->getNumOperands(), rhs->getNumOperands());
    for (int i = 1; i < minSize; ++i) {
        if (lhs->isIdOperand(i) != rhs->isIdOperand(i)) {
            return lhs->isIdOperand(i) < rhs->isIdOperand(i);
        }

        if (lhs->isIdOperand(i)) {
            if (lhs->getIdOperand(i) != rhs->getIdOperand(i)) {
                return lhs->getIdOperand(i) < rhs->getIdOperand(i);
            }
        } else {
            if (lhs->getImmediateOperand(i) != rhs->getImmediateOperand(i)) {
                return lhs->getImmediateOperand(i) < rhs->getImmediateOperand(i);
            }
        }
    }

    if (lhs->getNumOperands() != rhs->getNumOperands())
        return lhs->getNumOperands() < rhs->getNumOperands();

    // In this case they are equal.
    return false;
}

} // end spv namespace