SpvBuilder.cpp

//
// Copyright (C) 2014-2015 LunarG, Inc.
// Copyright (C) 2015-2018 Google, Inc.
// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved.
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
//
// Neither the name of 3Dlabs Inc. Ltd. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
// Helper for making SPIR-V IR. Generally, this is documented in the header
// SpvBuilder.h.
//

#include <cassert>
#include <cstdlib>
#include <unordered_set>
#include <algorithm>

#include "SpvBuilder.h"
#include "spvUtil.h"
#include "hex_float.h"

#ifndef _WIN32
#include <cstdio>
#endif

namespace spv {

Builder::Builder(unsigned int spvVersion, unsigned int magicNumber, SpvBuildLogger* buildLogger) :
    spvVersion(spvVersion),
    sourceLang(SourceLanguage::Unknown),
    sourceVersion(0),
    addressModel(AddressingModel::Logical),
    memoryModel(MemoryModel::GLSL450),
    builderNumber(magicNumber),
    buildPoint(nullptr),
    uniqueId(0),
    entryPointFunction(nullptr),
    generatingOpCodeForSpecConst(false),
    logger(buildLogger)
{
    clearAccessChain();
}

Builder::~Builder()
{
}

Id Builder::import(const char* name)
{
    Instruction* import = new Instruction(getUniqueId(), NoType, Op::OpExtInstImport);
    import->addStringOperand(name);
    module.mapInstruction(import);
    imports.push_back(std::unique_ptr<Instruction>(import));
    return import->getResultId();
}
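
// Illustrative sketch (not part of the builder, kept disabled): callers hold on to the
// result id of an import so later OpExtInst instructions can reference that extended
// instruction set, e.g. GLSL.std.450. The 'builder' parameter below is hypothetical.
#if 0
static void importExample(spv::Builder& builder)
{
    spv::Id glsl450 = builder.import("GLSL.std.450"); // result id of the OpExtInstImport
    (void)glsl450;                                    // pass to later OpExtInst-building calls
}
#endif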

// For creating new groupedTypes (will return old type if the requested one was already made).
Id Builder::makeVoidType()
{
    Instruction* type;
    if (groupedTypes[enumCast(Op::OpTypeVoid)].size() == 0) {
        Id typeId = getUniqueId();
        type = new Instruction(typeId, NoType, Op::OpTypeVoid);
        groupedTypes[enumCast(Op::OpTypeVoid)].push_back(type);
        constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
        module.mapInstruction(type);
        // Core OpTypeVoid used for debug void type
        if (emitNonSemanticShaderDebugInfo)
            debugId[typeId] = typeId;
    } else
        type = groupedTypes[enumCast(Op::OpTypeVoid)].back();
    return type->getResultId();
}

Id Builder::makeBoolType()
{
    Instruction* type;
    if (groupedTypes[enumCast(Op::OpTypeBool)].size() == 0) {
        type = new Instruction(getUniqueId(), NoType, Op::OpTypeBool);
        groupedTypes[enumCast(Op::OpTypeBool)].push_back(type);
        constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
        module.mapInstruction(type);
        if (emitNonSemanticShaderDebugInfo) {
            auto const debugResultId = makeBoolDebugType(32);
            debugId[type->getResultId()] = debugResultId;
        }
    } else
        type = groupedTypes[enumCast(Op::OpTypeBool)].back();
    return type->getResultId();
}

Id Builder::makeSamplerType()
{
    Instruction* type;
    if (groupedTypes[enumCast(Op::OpTypeSampler)].size() == 0) {
        type = new Instruction(getUniqueId(), NoType, Op::OpTypeSampler);
        groupedTypes[enumCast(Op::OpTypeSampler)].push_back(type);
        constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
        module.mapInstruction(type);
    } else
        type = groupedTypes[enumCast(Op::OpTypeSampler)].back();
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeCompositeDebugType({}, "type.sampler", NonSemanticShaderDebugInfo100Structure, true);
        debugId[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

Id Builder::makePointer(StorageClass storageClass, Id pointee)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypePointer)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypePointer)][t];
        if (type->getImmediateOperand(0) == (unsigned)storageClass &&
            type->getIdOperand(1) == pointee)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypePointer);
    type->reserveOperands(2);
    type->addImmediateOperand(storageClass);
    type->addIdOperand(pointee);
    groupedTypes[enumCast(Op::OpTypePointer)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    if (emitNonSemanticShaderDebugInfo) {
        const Id debugResultId = makePointerDebugType(storageClass, pointee);
        debugId[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

Id Builder::makeForwardPointer(StorageClass storageClass)
{
    // Caching/uniquifying doesn't work here, because we don't know the
    // pointee type and there can be multiple forward pointers of the same
    // storage type. Somebody higher up in the stack must keep track.
    Instruction* type = new Instruction(getUniqueId(), NoType, Op::OpTypeForwardPointer);
    type->addImmediateOperand(storageClass);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    if (emitNonSemanticShaderDebugInfo) {
        const Id debugResultId = makeForwardPointerDebugType(storageClass);
        debugId[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

Id Builder::makePointerFromForwardPointer(StorageClass storageClass, Id forwardPointerType, Id pointee)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypePointer)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypePointer)][t];
        if (type->getImmediateOperand(0) == (unsigned)storageClass &&
            type->getIdOperand(1) == pointee)
            return type->getResultId();
    }
    type = new Instruction(forwardPointerType, NoType, Op::OpTypePointer);
    type->reserveOperands(2);
    type->addImmediateOperand(storageClass);
    type->addIdOperand(pointee);
    groupedTypes[enumCast(Op::OpTypePointer)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    // If we are emitting nonsemantic debuginfo, we need to patch the debug pointer type
    // that was emitted alongside the forward pointer, now that we have a pointee debug
    // type for it to point to.
    if (emitNonSemanticShaderDebugInfo) {
        Instruction *debugForwardPointer = module.getInstruction(debugId[forwardPointerType]);
        assert(debugId[pointee]);
        debugForwardPointer->setIdOperand(2, debugId[pointee]);
    }
    return type->getResultId();
}
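
// Illustrative sketch (not part of the builder, kept disabled): the usual forward-pointer
// flow is to reserve a pointer id with makeForwardPointer() while the pointee is still
// being built, then complete it with makePointerFromForwardPointer() so the OpTypePointer
// reuses that same id. The 'builder' parameter, the storage class choice, and the struct
// layout below are hypothetical.
#if 0
static void forwardPointerExample(spv::Builder& builder)
{
    // Reserve a pointer id before the (possibly self-referential) pointee exists.
    spv::Id fwd = builder.makeForwardPointer(spv::StorageClass::PhysicalStorageBuffer);
    // Build the pointee type; it may legally contain 'fwd' among its members.
    spv::Id node = builder.makeStructType({ builder.makeIntegerType(32, true), fwd }, "Node");
    // Complete the pointer: 'fwd' becomes the result id of the OpTypePointer.
    spv::Id ptr = builder.makePointerFromForwardPointer(spv::StorageClass::PhysicalStorageBuffer, fwd, node);
    (void)ptr;
}
#endif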

Id Builder::makeIntegerType(int width, bool hasSign)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeInt)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeInt)][t];
        if (type->getImmediateOperand(0) == (unsigned)width &&
            type->getImmediateOperand(1) == (hasSign ? 1u : 0u))
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeInt);
    type->reserveOperands(2);
    type->addImmediateOperand(width);
    type->addImmediateOperand(hasSign ? 1 : 0);
    groupedTypes[enumCast(Op::OpTypeInt)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    // deal with capabilities
    switch (width) {
    case 8:
    case 16:
        // these are currently handled by storage-type declarations and post processing
        break;
    case 64:
        addCapability(Capability::Int64);
        break;
    default:
        break;
    }
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeIntegerDebugType(width, hasSign);
        debugId[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

Id Builder::makeFloatType(int width)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeFloat)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeFloat)][t];
        if (type->getNumOperands() != 1) {
            continue;
        }
        if (type->getImmediateOperand(0) == (unsigned)width)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeFloat);
    type->addImmediateOperand(width);
    groupedTypes[enumCast(Op::OpTypeFloat)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    // deal with capabilities
    switch (width) {
    case 16:
        // currently handled by storage-type declarations and post processing
        break;
    case 64:
        addCapability(Capability::Float64);
        break;
    default:
        break;
    }
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeFloatDebugType(width);
        debugId[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

Id Builder::makeBFloat16Type()
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeFloat)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeFloat)][t];
        if (type->getNumOperands() != 2) {
            continue;
        }
        if (type->getImmediateOperand(0) == (unsigned)16 &&
            type->getImmediateOperand(1) == FPEncoding::BFloat16KHR)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeFloat);
    type->addImmediateOperand(16);
    type->addImmediateOperand(FPEncoding::BFloat16KHR);
    groupedTypes[enumCast(Op::OpTypeFloat)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    addExtension(spv::E_SPV_KHR_bfloat16);
    addCapability(Capability::BFloat16TypeKHR);
#if 0
    // XXX not supported
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeFloatDebugType(width);
        debugId[type->getResultId()] = debugResultId;
    }
#endif
    return type->getResultId();
}

Id Builder::makeFloatE5M2Type()
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeFloat)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeFloat)][t];
        if (type->getNumOperands() != 2) {
            continue;
        }
        if (type->getImmediateOperand(0) == (unsigned)8 &&
            type->getImmediateOperand(1) == FPEncoding::Float8E5M2EXT)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeFloat);
    type->addImmediateOperand(8);
    type->addImmediateOperand(FPEncoding::Float8E5M2EXT);
    groupedTypes[enumCast(Op::OpTypeFloat)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    addExtension(spv::E_SPV_EXT_float8);
    addCapability(Capability::Float8EXT);
#if 0
    // XXX not supported
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeFloatDebugType(width);
        debugId[type->getResultId()] = debugResultId;
    }
#endif
    return type->getResultId();
}

Id Builder::makeFloatE4M3Type()
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeFloat)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeFloat)][t];
        if (type->getNumOperands() != 2) {
            continue;
        }
        if (type->getImmediateOperand(0) == (unsigned)8 &&
            type->getImmediateOperand(1) == FPEncoding::Float8E4M3EXT)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeFloat);
    type->addImmediateOperand(8);
    type->addImmediateOperand(FPEncoding::Float8E4M3EXT);
    groupedTypes[enumCast(Op::OpTypeFloat)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    addExtension(spv::E_SPV_EXT_float8);
    addCapability(Capability::Float8EXT);
#if 0
    // XXX not supported
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeFloatDebugType(width);
        debugId[type->getResultId()] = debugResultId;
    }
#endif
    return type->getResultId();
}

// Make a struct without checking for duplication.
// See makeStructResultType() for non-decorated structs
// needed as the result of some instructions, which does
// check for duplicates.
Id Builder::makeStructType(const std::vector<Id>& members, const char* name, bool const compilerGenerated)
{
    // Don't look for previous one, because in the general case,
    // structs can be duplicated except for decorations.
    // not found, make it
    Instruction* type = new Instruction(getUniqueId(), NoType, Op::OpTypeStruct);
    for (int op = 0; op < (int)members.size(); ++op)
        type->addIdOperand(members[op]);
    groupedTypes[enumCast(Op::OpTypeStruct)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    addName(type->getResultId(), name);
    if (emitNonSemanticShaderDebugInfo && !compilerGenerated)
    {
        auto const debugResultId = makeCompositeDebugType(members, name, NonSemanticShaderDebugInfo100Structure);
        debugId[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

// Make a struct for the simple results of several instructions,
// checking for duplication.
Id Builder::makeStructResultType(Id type0, Id type1)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeStruct)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeStruct)][t];
        if (type->getNumOperands() != 2)
            continue;
        if (type->getIdOperand(0) != type0 ||
            type->getIdOperand(1) != type1)
            continue;
        return type->getResultId();
    }
    // not found, make it
    std::vector<spv::Id> members;
    members.push_back(type0);
    members.push_back(type1);
    return makeStructType(members, "ResType");
}
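
// Illustrative sketch (not part of the builder, kept disabled): makeStructResultType() is
// meant for instructions whose result is a two-member struct, e.g. OpIAddCarry, which
// yields the sum and the carry as two unsigned integers. Identical member pairs reuse the
// same "ResType". The 'builder' parameter is hypothetical.
#if 0
static void structResultExample(spv::Builder& builder)
{
    spv::Id uintType = builder.makeIntegerType(32, false);
    spv::Id resType  = builder.makeStructResultType(uintType, uintType); // deduplicated
    (void)resType;
}
#endif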

Id Builder::makeVectorType(Id component, int size)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeVector)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeVector)][t];
        if (type->getIdOperand(0) == component &&
            type->getImmediateOperand(1) == (unsigned)size)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeVector);
    type->reserveOperands(2);
    type->addIdOperand(component);
    type->addImmediateOperand(size);
    groupedTypes[enumCast(Op::OpTypeVector)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeVectorDebugType(component, size);
        debugId[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

Id Builder::makeMatrixType(Id component, int cols, int rows)
{
    assert(cols <= maxMatrixSize && rows <= maxMatrixSize);
    Id column = makeVectorType(component, rows);
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeMatrix)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeMatrix)][t];
        if (type->getIdOperand(0) == column &&
            type->getImmediateOperand(1) == (unsigned)cols)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeMatrix);
    type->reserveOperands(2);
    type->addIdOperand(column);
    type->addImmediateOperand(cols);
    groupedTypes[enumCast(Op::OpTypeMatrix)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeMatrixDebugType(column, cols);
        debugId[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

Id Builder::makeCooperativeMatrixTypeKHR(Id component, Id scope, Id rows, Id cols, Id use)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeCooperativeMatrixKHR)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeCooperativeMatrixKHR)][t];
        if (type->getIdOperand(0) == component &&
            type->getIdOperand(1) == scope &&
            type->getIdOperand(2) == rows &&
            type->getIdOperand(3) == cols &&
            type->getIdOperand(4) == use)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeCooperativeMatrixKHR);
    type->reserveOperands(5);
    type->addIdOperand(component);
    type->addIdOperand(scope);
    type->addIdOperand(rows);
    type->addIdOperand(cols);
    type->addIdOperand(use);
    groupedTypes[enumCast(Op::OpTypeCooperativeMatrixKHR)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    if (emitNonSemanticShaderDebugInfo)
    {
        // Find a name for one of the parameters. It can either come from debuginfo for another
        // type, or an OpName from a constant.
        auto const findName = [&](Id id) {
            Id id2 = debugId[id];
            for (auto &t : groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic]) {
                if (t->getResultId() == id2) {
                    for (auto &s : strings) {
                        if (s->getResultId() == t->getIdOperand(2)) {
                            return s->getNameString();
                        }
                    }
                }
            }
            for (auto &t : names) {
                if (t->getIdOperand(0) == id) {
                    return t->getNameString();
                }
            }
            return "unknown";
        };
        std::string debugName = "coopmat<";
        debugName += std::string(findName(component)) + ", ";
        if (isConstantScalar(scope)) {
            debugName += std::string("gl_Scope") + std::string(spv::ScopeToString((spv::Scope)getConstantScalar(scope))) + ", ";
        } else {
            debugName += std::string(findName(scope)) + ", ";
        }
        debugName += std::string(findName(rows)) + ", ";
        debugName += std::string(findName(cols)) + ">";
        // There's no nonsemantic debug info instruction for cooperative matrix types,
        // use opaque composite instead.
        auto const debugResultId = makeCompositeDebugType({}, debugName.c_str(), NonSemanticShaderDebugInfo100Structure, true);
        debugId[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

Id Builder::makeCooperativeMatrixTypeNV(Id component, Id scope, Id rows, Id cols)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeCooperativeMatrixNV)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeCooperativeMatrixNV)][t];
        if (type->getIdOperand(0) == component && type->getIdOperand(1) == scope && type->getIdOperand(2) == rows &&
            type->getIdOperand(3) == cols)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeCooperativeMatrixNV);
    type->reserveOperands(4);
    type->addIdOperand(component);
    type->addIdOperand(scope);
    type->addIdOperand(rows);
    type->addIdOperand(cols);
    groupedTypes[enumCast(Op::OpTypeCooperativeMatrixNV)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    return type->getResultId();
}

Id Builder::makeCooperativeMatrixTypeWithSameShape(Id component, Id otherType)
{
    Instruction* instr = module.getInstruction(otherType);
    if (instr->getOpCode() == Op::OpTypeCooperativeMatrixNV) {
        return makeCooperativeMatrixTypeNV(component, instr->getIdOperand(1), instr->getIdOperand(2), instr->getIdOperand(3));
    } else {
        assert(instr->getOpCode() == Op::OpTypeCooperativeMatrixKHR);
        return makeCooperativeMatrixTypeKHR(component, instr->getIdOperand(1), instr->getIdOperand(2), instr->getIdOperand(3), instr->getIdOperand(4));
    }
}

Id Builder::makeCooperativeVectorTypeNV(Id componentType, Id components)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeCooperativeVectorNV)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeCooperativeVectorNV)][t];
        if (type->getIdOperand(0) == componentType &&
            type->getIdOperand(1) == components)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeCooperativeVectorNV);
    type->addIdOperand(componentType);
    type->addIdOperand(components);
    groupedTypes[enumCast(Op::OpTypeCooperativeVectorNV)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    return type->getResultId();
}

Id Builder::makeTensorTypeARM(Id elementType, Id rank)
{
    // See if an OpTypeTensorARM with same element type and rank already exists.
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeTensorARM)].size(); ++t) {
        const Instruction *type = groupedTypes[enumCast(Op::OpTypeTensorARM)][t];
        if (type->getIdOperand(0) == elementType && type->getIdOperand(1) == rank)
            return type->getResultId();
    }
    // Not found, make it.
    std::unique_ptr<Instruction> type(new Instruction(getUniqueId(), NoType, Op::OpTypeTensorARM));
    type->addIdOperand(elementType);
    type->addIdOperand(rank);
    groupedTypes[enumCast(Op::OpTypeTensorARM)].push_back(type.get());
    module.mapInstruction(type.get());
    Id resultID = type->getResultId();
    constantsTypesGlobals.push_back(std::move(type));
    return resultID;
}

Id Builder::makeGenericType(spv::Op opcode, std::vector<spv::IdImmediate>& operands)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(opcode)].size(); ++t) {
        type = groupedTypes[enumCast(opcode)][t];
        if (static_cast<size_t>(type->getNumOperands()) != operands.size())
            continue; // Number mismatch, find next
        bool match = true;
        for (int op = 0; match && op < (int)operands.size(); ++op) {
            match = (operands[op].isId ? type->getIdOperand(op) : type->getImmediateOperand(op)) == operands[op].word;
        }
        if (match)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, opcode);
    type->reserveOperands(operands.size());
    for (size_t op = 0; op < operands.size(); ++op) {
        if (operands[op].isId)
            type->addIdOperand(operands[op].word);
        else
            type->addImmediateOperand(operands[op].word);
    }
    groupedTypes[enumCast(opcode)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    return type->getResultId();
}

// TODO: performance: track arrays per stride
// If a stride is supplied (non-zero), always make a new array type.
// If no stride (0), reuse previous array types.
// 'sizeId' is an Id of a constant or specialization constant of the array size
Id Builder::makeArrayType(Id element, Id sizeId, int stride)
{
    Instruction* type;
    if (stride == 0) {
        // try to find existing type
        for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeArray)].size(); ++t) {
            type = groupedTypes[enumCast(Op::OpTypeArray)][t];
            if (type->getIdOperand(0) == element &&
                type->getIdOperand(1) == sizeId &&
                explicitlyLaidOut.find(type->getResultId()) == explicitlyLaidOut.end())
                return type->getResultId();
        }
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeArray);
    type->reserveOperands(2);
    type->addIdOperand(element);
    type->addIdOperand(sizeId);
    groupedTypes[enumCast(Op::OpTypeArray)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    if (stride != 0) {
        explicitlyLaidOut.insert(type->getResultId());
    }
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeArrayDebugType(element, sizeId);
        debugId[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}
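
// Illustrative sketch (not part of the builder, kept disabled): with stride 0 the array
// type is shared, while a non-zero stride always mints a fresh OpTypeArray and records it
// as explicitly laid out, so a layout decoration attached to it cannot leak onto other
// users of the same element/size. The 'builder' parameter and stride value are hypothetical.
#if 0
static void arrayTypeExample(spv::Builder& builder)
{
    spv::Id elem = builder.makeFloatType(32);
    spv::Id four = builder.makeUintConstant(4);
    spv::Id a = builder.makeArrayType(elem, four, 0);  // reusable, no explicit layout
    spv::Id b = builder.makeArrayType(elem, four, 0);  // same id as 'a'
    spv::Id c = builder.makeArrayType(elem, four, 16); // distinct id, explicitly laid out
    (void)a; (void)b; (void)c;
}
#endif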

Id Builder::makeRuntimeArray(Id element)
{
    Instruction* type = new Instruction(getUniqueId(), NoType, Op::OpTypeRuntimeArray);
    type->addIdOperand(element);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    if (emitNonSemanticShaderDebugInfo)
    {
        auto const debugResultId = makeArrayDebugType(element, makeUintConstant(0));
        debugId[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}

Id Builder::makeFunctionType(Id returnType, const std::vector<Id>& paramTypes)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeFunction)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeFunction)][t];
        if (type->getIdOperand(0) != returnType || (int)paramTypes.size() != type->getNumOperands() - 1)
            continue;
        bool mismatch = false;
        for (int p = 0; p < (int)paramTypes.size(); ++p) {
            if (paramTypes[p] != type->getIdOperand(p + 1)) {
                mismatch = true;
                break;
            }
        }
        if (! mismatch)
        {
            // If compiling HLSL, glslang will create a wrapper function around the entry point. Accordingly, a void(void)
            // function type is created for the wrapper function. However, nonsemantic shader debug information is disabled
            // while creating the HLSL wrapper. Consequently, if we encounter another void(void) function, we need to create
            // the associated debug function type if it hasn't been created yet.
            if (emitNonSemanticShaderDebugInfo && debugId[type->getResultId()] == 0) {
                assert(sourceLang == spv::SourceLanguage::HLSL);
                assert(getTypeClass(returnType) == Op::OpTypeVoid && paramTypes.size() == 0);
                Id debugTypeId = makeDebugFunctionType(returnType, {});
                debugId[type->getResultId()] = debugTypeId;
            }
            return type->getResultId();
        }
    }
    // not found, make it
    Id typeId = getUniqueId();
    type = new Instruction(typeId, NoType, Op::OpTypeFunction);
    type->reserveOperands(paramTypes.size() + 1);
    type->addIdOperand(returnType);
    for (int p = 0; p < (int)paramTypes.size(); ++p)
        type->addIdOperand(paramTypes[p]);
    groupedTypes[enumCast(Op::OpTypeFunction)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    // make debug type and map it
    if (emitNonSemanticShaderDebugInfo) {
        Id debugTypeId = makeDebugFunctionType(returnType, paramTypes);
        debugId[typeId] = debugTypeId;
    }
    return type->getResultId();
}
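
// Illustrative sketch (not part of the builder, kept disabled): building the OpTypeFunction
// for a signature such as float f(float, int). Identical signatures are deduplicated to the
// same result id. The 'builder' parameter is hypothetical.
#if 0
static void functionTypeExample(spv::Builder& builder)
{
    spv::Id floatType = builder.makeFloatType(32);
    spv::Id intType = builder.makeIntegerType(32, true);
    std::vector<spv::Id> params = { floatType, intType };
    spv::Id fnType = builder.makeFunctionType(floatType, params);
    (void)fnType;
}
#endif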

Id Builder::makeDebugFunctionType(Id returnType, const std::vector<Id>& paramTypes)
{
    assert(debugId[returnType] != 0);
    Id typeId = getUniqueId();
    auto type = new Instruction(typeId, makeVoidType(), Op::OpExtInst);
    type->reserveOperands(paramTypes.size() + 4);
    type->addIdOperand(nonSemanticShaderDebugInfo);
    type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeFunction);
    type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsPublic));
    type->addIdOperand(debugId[returnType]);
    for (auto const paramType : paramTypes) {
        if (isPointerType(paramType) || isArrayType(paramType)) {
            type->addIdOperand(debugId[getContainedTypeId(paramType)]);
        }
        else {
            type->addIdOperand(debugId[paramType]);
        }
    }
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    return typeId;
}

Id Builder::makeImageType(Id sampledType, Dim dim, bool depth, bool arrayed, bool ms, unsigned sampled,
    ImageFormat format)
{
    assert(sampled == 1 || sampled == 2);
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeImage)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeImage)][t];
        if (type->getIdOperand(0) == sampledType &&
            type->getImmediateOperand(1) == (unsigned int)dim &&
            type->getImmediateOperand(2) == (  depth ? 1u : 0u) &&
            type->getImmediateOperand(3) == (arrayed ? 1u : 0u) &&
            type->getImmediateOperand(4) == (     ms ? 1u : 0u) &&
            type->getImmediateOperand(5) == sampled &&
            type->getImmediateOperand(6) == (unsigned int)format)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeImage);
    type->reserveOperands(7);
    type->addIdOperand(sampledType);
    type->addImmediateOperand(dim);
    type->addImmediateOperand(  depth ? 1 : 0);
    type->addImmediateOperand(arrayed ? 1 : 0);
    type->addImmediateOperand(     ms ? 1 : 0);
    type->addImmediateOperand(sampled);
    type->addImmediateOperand((unsigned int)format);
    groupedTypes[enumCast(Op::OpTypeImage)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    // deal with capabilities
    switch (dim) {
    case Dim::Buffer:
        if (sampled == 1)
            addCapability(Capability::SampledBuffer);
        else
            addCapability(Capability::ImageBuffer);
        break;
    case Dim::Dim1D:
        if (sampled == 1)
            addCapability(Capability::Sampled1D);
        else
            addCapability(Capability::Image1D);
        break;
    case Dim::Cube:
        if (arrayed) {
            if (sampled == 1)
                addCapability(Capability::SampledCubeArray);
            else
                addCapability(Capability::ImageCubeArray);
        }
        break;
    case Dim::Rect:
        if (sampled == 1)
            addCapability(Capability::SampledRect);
        else
            addCapability(Capability::ImageRect);
        break;
    case Dim::SubpassData:
        addCapability(Capability::InputAttachment);
        break;
    default:
        break;
    }
    if (ms) {
        if (sampled == 2) {
            // Images used with subpass data are not storage
            // images, so don't require the capability for them.
            if (dim != Dim::SubpassData)
                addCapability(Capability::StorageImageMultisample);
            if (arrayed)
                addCapability(Capability::ImageMSArray);
        }
    }
    if (emitNonSemanticShaderDebugInfo)
    {
        auto TypeName = [&dim]() -> char const* {
            switch (dim) {
            case Dim::Dim1D: return "type.1d.image";
            case Dim::Dim2D: return "type.2d.image";
            case Dim::Dim3D: return "type.3d.image";
            case Dim::Cube:  return "type.cube.image";
            default: return "type.image";
            }
        };
        auto const debugResultId = makeCompositeDebugType({}, TypeName(), NonSemanticShaderDebugInfo100Class, true);
        debugId[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}
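
// Illustrative sketch (not part of the builder, kept disabled): declaring the type of a
// plain 2D texture (sampled == 1, i.e. used together with a sampler) and wrapping it in an
// OpTypeSampledImage. The 'builder' parameter and the format choice are hypothetical.
#if 0
static void imageTypeExample(spv::Builder& builder)
{
    spv::Id floatType = builder.makeFloatType(32);
    spv::Id image2D = builder.makeImageType(floatType, spv::Dim::Dim2D,
                                            /*depth*/ false, /*arrayed*/ false, /*ms*/ false,
                                            /*sampled*/ 1, spv::ImageFormat::Unknown);
    spv::Id sampled2D = builder.makeSampledImageType(image2D);
    (void)sampled2D;
}
#endif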
  825. Id Builder::makeSampledImageType(Id imageType)
  826. {
  827. // try to find it
  828. Instruction* type;
  829. for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeSampledImage)].size(); ++t) {
  830. type = groupedTypes[enumCast(Op::OpTypeSampledImage)][t];
  831. if (type->getIdOperand(0) == imageType)
  832. return type->getResultId();
  833. }
  834. // not found, make it
  835. type = new Instruction(getUniqueId(), NoType, Op::OpTypeSampledImage);
  836. type->addIdOperand(imageType);
  837. groupedTypes[enumCast(Op::OpTypeSampledImage)].push_back(type);
  838. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  839. module.mapInstruction(type);
  840. if (emitNonSemanticShaderDebugInfo)
  841. {
  842. auto const debugResultId = makeCompositeDebugType({}, "type.sampled.image", NonSemanticShaderDebugInfo100Class, true);
  843. debugId[type->getResultId()] = debugResultId;
  844. }
  845. return type->getResultId();
  846. }
  847. Id Builder::makeDebugInfoNone()
  848. {
  849. if (debugInfoNone != 0)
  850. return debugInfoNone;
  851. Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  852. inst->reserveOperands(2);
  853. inst->addIdOperand(nonSemanticShaderDebugInfo);
  854. inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugInfoNone);
  855. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
  856. module.mapInstruction(inst);
  857. debugInfoNone = inst->getResultId();
  858. return debugInfoNone;
  859. }
  860. Id Builder::makeBoolDebugType(int const size)
  861. {
  862. // try to find it
  863. Instruction* type;
  864. for (int t = 0; t < (int)groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].size(); ++t) {
  865. type = groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic][t];
  866. if (type->getIdOperand(0) == getStringId("bool") &&
  867. type->getIdOperand(1) == static_cast<unsigned int>(size) &&
  868. type->getIdOperand(2) == NonSemanticShaderDebugInfo100Boolean)
  869. return type->getResultId();
  870. }
  871. type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  872. type->reserveOperands(6);
  873. type->addIdOperand(nonSemanticShaderDebugInfo);
  874. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeBasic);
  875. type->addIdOperand(getStringId("bool")); // name id
  876. type->addIdOperand(makeUintConstant(size)); // size id
  877. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100Boolean)); // encoding id
  878. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100None)); // flags id
  879. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].push_back(type);
  880. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  881. module.mapInstruction(type);
  882. return type->getResultId();
  883. }
  884. Id Builder::makeIntegerDebugType(int const width, bool const hasSign)
  885. {
  886. const char* typeName = nullptr;
  887. switch (width) {
  888. case 8: typeName = hasSign ? "int8_t" : "uint8_t"; break;
  889. case 16: typeName = hasSign ? "int16_t" : "uint16_t"; break;
  890. case 64: typeName = hasSign ? "int64_t" : "uint64_t"; break;
  891. default: typeName = hasSign ? "int" : "uint";
  892. }
  893. auto nameId = getStringId(typeName);
  894. // try to find it
  895. Instruction* type;
  896. for (int t = 0; t < (int)groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].size(); ++t) {
  897. type = groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic][t];
  898. if (type->getIdOperand(0) == nameId &&
  899. type->getIdOperand(1) == static_cast<unsigned int>(width) &&
  900. type->getIdOperand(2) == (hasSign ? NonSemanticShaderDebugInfo100Signed : NonSemanticShaderDebugInfo100Unsigned))
  901. return type->getResultId();
  902. }
  903. // not found, make it
  904. type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  905. type->reserveOperands(6);
  906. type->addIdOperand(nonSemanticShaderDebugInfo);
  907. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeBasic);
  908. type->addIdOperand(nameId); // name id
  909. type->addIdOperand(makeUintConstant(width)); // size id
910. if (hasSign) {
  911. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100Signed)); // encoding id
  912. } else {
  913. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100Unsigned)); // encoding id
  914. }
  915. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100None)); // flags id
  916. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].push_back(type);
  917. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  918. module.mapInstruction(type);
  919. return type->getResultId();
  920. }
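// Illustrative sketch (not part of the original source), showing the DebugTypeBasic operands
// this produces; the constant-id operands are written as their literal values for readability:
//
//     makeIntegerDebugType(32, true);
//     // -> DebugTypeBasic  Name="int"       Size=32  Encoding=Signed    Flags=None
//     makeIntegerDebugType(16, false);
//     // -> DebugTypeBasic  Name="uint16_t"  Size=16  Encoding=Unsigned  Flags=None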
  921. Id Builder::makeFloatDebugType(int const width)
  922. {
  923. const char* typeName = nullptr;
  924. switch (width) {
  925. case 16: typeName = "float16_t"; break;
  926. case 64: typeName = "double"; break;
  927. default: typeName = "float"; break;
  928. }
  929. auto nameId = getStringId(typeName);
  930. // try to find it
  931. Instruction* type;
  932. for (int t = 0; t < (int)groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].size(); ++t) {
  933. type = groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic][t];
  934. if (type->getIdOperand(0) == nameId &&
  935. type->getIdOperand(1) == static_cast<unsigned int>(width) &&
  936. type->getIdOperand(2) == NonSemanticShaderDebugInfo100Float)
  937. return type->getResultId();
  938. }
  939. // not found, make it
  940. type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  941. type->reserveOperands(6);
  942. type->addIdOperand(nonSemanticShaderDebugInfo);
  943. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeBasic);
  944. type->addIdOperand(nameId); // name id
  945. type->addIdOperand(makeUintConstant(width)); // size id
  946. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100Float)); // encoding id
  947. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100None)); // flags id
  948. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].push_back(type);
  949. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  950. module.mapInstruction(type);
  951. return type->getResultId();
  952. }
  953. Id Builder::makeSequentialDebugType(Id const baseType, Id const componentCount, NonSemanticShaderDebugInfo100Instructions const sequenceType)
  954. {
  955. assert(sequenceType == NonSemanticShaderDebugInfo100DebugTypeArray ||
  956. sequenceType == NonSemanticShaderDebugInfo100DebugTypeVector);
  957. // try to find it
  958. Instruction* type;
  959. for (int t = 0; t < (int)groupedDebugTypes[sequenceType].size(); ++t) {
  960. type = groupedDebugTypes[sequenceType][t];
  961. if (type->getIdOperand(0) == baseType &&
  962. type->getIdOperand(1) == makeUintConstant(componentCount))
  963. return type->getResultId();
  964. }
  965. // not found, make it
  966. type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  967. type->reserveOperands(4);
  968. type->addIdOperand(nonSemanticShaderDebugInfo);
  969. type->addImmediateOperand(sequenceType);
  970. type->addIdOperand(debugId[baseType]); // base type
  971. type->addIdOperand(componentCount); // component count
  972. groupedDebugTypes[sequenceType].push_back(type);
  973. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  974. module.mapInstruction(type);
  975. return type->getResultId();
  976. }
  977. Id Builder::makeArrayDebugType(Id const baseType, Id const componentCount)
  978. {
  979. return makeSequentialDebugType(baseType, componentCount, NonSemanticShaderDebugInfo100DebugTypeArray);
  980. }
  981. Id Builder::makeVectorDebugType(Id const baseType, int const componentCount)
  982. {
  983. return makeSequentialDebugType(baseType, makeUintConstant(componentCount), NonSemanticShaderDebugInfo100DebugTypeVector);
  984. }
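// Illustrative sketch (not part of the original source). Note that 'baseType' here is the
// SPIR-V type id, not its debug id; makeSequentialDebugType() looks up debugId[baseType]:
//
//     Id f32   = makeFloatType(32);             // assumes its debug type has been registered
//     Id v4Dbg = makeVectorDebugType(f32, 4);   // DebugTypeVector over debugId[f32], count 4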
  985. Id Builder::makeMatrixDebugType(Id const vectorType, int const vectorCount, bool columnMajor)
  986. {
  987. // try to find it
  988. Instruction* type;
  989. for (int t = 0; t < (int)groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeMatrix].size(); ++t) {
  990. type = groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeMatrix][t];
  991. if (type->getIdOperand(0) == vectorType &&
  992. type->getIdOperand(1) == makeUintConstant(vectorCount))
  993. return type->getResultId();
  994. }
  995. // not found, make it
  996. type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  997. type->reserveOperands(5);
  998. type->addIdOperand(nonSemanticShaderDebugInfo);
  999. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeMatrix);
  1000. type->addIdOperand(debugId[vectorType]); // vector type id
  1001. type->addIdOperand(makeUintConstant(vectorCount)); // component count id
  1002. type->addIdOperand(makeBoolConstant(columnMajor)); // column-major id
  1003. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeMatrix].push_back(type);
  1004. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1005. module.mapInstruction(type);
  1006. return type->getResultId();
  1007. }
  1008. Id Builder::makeMemberDebugType(Id const memberType, DebugTypeLoc const& debugTypeLoc)
  1009. {
  1010. assert(debugId[memberType] != 0);
  1011. Instruction* type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1012. type->reserveOperands(10);
  1013. type->addIdOperand(nonSemanticShaderDebugInfo);
  1014. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeMember);
  1015. type->addIdOperand(getStringId(debugTypeLoc.name)); // name id
  1016. type->addIdOperand(debugId[memberType]); // type id
  1017. type->addIdOperand(makeDebugSource(currentFileId)); // source id
  1018. type->addIdOperand(makeUintConstant(debugTypeLoc.line)); // line id TODO: currentLine is always zero
  1019. type->addIdOperand(makeUintConstant(debugTypeLoc.column)); // TODO: column id
  1020. type->addIdOperand(makeUintConstant(0)); // TODO: offset id
  1021. type->addIdOperand(makeUintConstant(0)); // TODO: size id
  1022. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsPublic)); // flags id
  1023. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeMember].push_back(type);
  1024. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1025. module.mapInstruction(type);
  1026. return type->getResultId();
  1027. }
1028. // Note: To represent a source-language opaque type, this instruction must have no Members operands, its Size operand must be
1029. // DebugInfoNone, and its Name must start with '@' to avoid clashes with user-defined names.
  1030. Id Builder::makeCompositeDebugType(std::vector<Id> const& memberTypes, char const*const name,
  1031. NonSemanticShaderDebugInfo100DebugCompositeType const tag, bool const isOpaqueType)
  1032. {
  1033. // Create the debug member types.
  1034. std::vector<Id> memberDebugTypes;
  1035. for(auto const memberType : memberTypes) {
  1036. assert(debugTypeLocs.find(memberType) != debugTypeLocs.end());
  1037. // There _should_ be debug types for all the member types but currently buffer references
  1038. // do not have member debug info generated.
  1039. if (debugId[memberType])
  1040. memberDebugTypes.emplace_back(makeMemberDebugType(memberType, debugTypeLocs[memberType]));
  1041. // TODO: Need to rethink this method of passing location information.
  1042. // debugTypeLocs.erase(memberType);
  1043. }
  1044. // Create The structure debug type.
  1045. Instruction* type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1046. type->reserveOperands(memberDebugTypes.size() + 11);
  1047. type->addIdOperand(nonSemanticShaderDebugInfo);
  1048. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeComposite);
  1049. type->addIdOperand(getStringId(name)); // name id
  1050. type->addIdOperand(makeUintConstant(tag)); // tag id
  1051. type->addIdOperand(makeDebugSource(currentFileId)); // source id
  1052. type->addIdOperand(makeUintConstant(currentLine)); // line id TODO: currentLine always zero?
  1053. type->addIdOperand(makeUintConstant(0)); // TODO: column id
  1054. type->addIdOperand(makeDebugCompilationUnit()); // scope id
1055. if (isOpaqueType) {
  1056. // Prepend '@' to opaque types.
  1057. type->addIdOperand(getStringId('@' + std::string(name))); // linkage name id
  1058. type->addIdOperand(makeDebugInfoNone()); // size id
  1059. } else {
  1060. type->addIdOperand(getStringId(name)); // linkage name id
  1061. type->addIdOperand(makeUintConstant(0)); // TODO: size id
  1062. }
  1063. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsPublic)); // flags id
1064. assert(!isOpaqueType || memberDebugTypes.empty());
  1065. for(auto const memberDebugType : memberDebugTypes) {
  1066. type->addIdOperand(memberDebugType);
  1067. }
  1068. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeComposite].push_back(type);
  1069. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1070. module.mapInstruction(type);
  1071. return type->getResultId();
  1072. }
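// Illustrative sketch (not part of the original source): with no members and isOpaqueType set,
// this is how the builder models opaque handle types, e.g. the sampled-image and rayQuery debug
// types created elsewhere in this file:
//
//     Id dbg = makeCompositeDebugType({}, "rayQuery",
//                                     NonSemanticShaderDebugInfo100Structure, true);
//     // -> DebugTypeComposite, linkage name "@rayQuery", Size = DebugInfoNone, no member operands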
  1073. Id Builder::makePointerDebugType(StorageClass storageClass, Id const baseType)
  1074. {
  1075. const Id debugBaseType = debugId[baseType];
  1076. if (!debugBaseType) {
  1077. return makeDebugInfoNone();
  1078. }
  1079. const Id scID = makeUintConstant(storageClass);
  1080. for (Instruction* otherType : groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypePointer]) {
  1081. if (otherType->getIdOperand(2) == debugBaseType &&
  1082. otherType->getIdOperand(3) == scID) {
  1083. return otherType->getResultId();
  1084. }
  1085. }
  1086. Instruction* type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1087. type->reserveOperands(5);
  1088. type->addIdOperand(nonSemanticShaderDebugInfo);
  1089. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypePointer);
  1090. type->addIdOperand(debugBaseType);
  1091. type->addIdOperand(scID);
  1092. type->addIdOperand(makeUintConstant(0));
  1093. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypePointer].push_back(type);
  1094. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1095. module.mapInstruction(type);
  1096. return type->getResultId();
  1097. }
1098. // Emit an OpExtInstWithForwardRefsKHR non-semantic instruction for a pointer debug type
1099. // whose pointee debug type is not available yet. Until it is, the pointee operand just
1100. // refers to the instruction's own result id, and we rely on patching it later.
  1101. Id Builder::makeForwardPointerDebugType(StorageClass storageClass)
  1102. {
  1103. const Id scID = makeUintConstant(storageClass);
  1104. this->addExtension(spv::E_SPV_KHR_relaxed_extended_instruction);
  1105. Instruction *type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInstWithForwardRefsKHR);
  1106. type->addIdOperand(nonSemanticShaderDebugInfo);
  1107. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypePointer);
  1108. type->addIdOperand(type->getResultId());
  1109. type->addIdOperand(scID);
  1110. type->addIdOperand(makeUintConstant(0));
  1111. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypePointer].push_back(type);
  1112. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1113. module.mapInstruction(type);
  1114. return type->getResultId();
  1115. }
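// Illustrative note (not part of the original source): the instruction emitted above is
//
//     OpExtInstWithForwardRefsKHR <DebugInfo set> DebugTypePointer <own result id> <storage class> 0
//
// (constant ids shown as literals), i.e. the pointee operand temporarily names the instruction
// itself and is expected to be rewritten once the real pointee debug type has been created.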
  1116. Id Builder::makeDebugSource(const Id fileName) {
  1117. if (debugSourceId.find(fileName) != debugSourceId.end())
  1118. return debugSourceId[fileName];
  1119. spv::Id resultId = getUniqueId();
  1120. Instruction* sourceInst = new Instruction(resultId, makeVoidType(), Op::OpExtInst);
  1121. sourceInst->reserveOperands(3);
  1122. sourceInst->addIdOperand(nonSemanticShaderDebugInfo);
  1123. sourceInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugSource);
  1124. sourceInst->addIdOperand(fileName);
  1125. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(sourceInst));
  1126. module.mapInstruction(sourceInst);
  1127. if (emitNonSemanticShaderDebugSource) {
  1128. const int maxWordCount = 0xFFFF;
  1129. const int opSourceWordCount = 4;
  1130. const int nonNullBytesPerInstruction = 4 * (maxWordCount - opSourceWordCount) - 1;
  1131. auto processDebugSource = [&](std::string source) {
  1132. if (source.size() > 0) {
  1133. int nextByte = 0;
  1134. while ((int)source.size() - nextByte > 0) {
  1135. auto subString = source.substr(nextByte, nonNullBytesPerInstruction);
  1136. auto sourceId = getStringId(subString);
  1137. if (nextByte == 0) {
  1138. // DebugSource
  1139. sourceInst->addIdOperand(sourceId);
  1140. } else {
  1141. // DebugSourceContinued
  1142. Instruction* sourceContinuedInst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1143. sourceContinuedInst->reserveOperands(2);
  1144. sourceContinuedInst->addIdOperand(nonSemanticShaderDebugInfo);
  1145. sourceContinuedInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugSourceContinued);
  1146. sourceContinuedInst->addIdOperand(sourceId);
  1147. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(sourceContinuedInst));
  1148. module.mapInstruction(sourceContinuedInst);
  1149. }
  1150. nextByte += nonNullBytesPerInstruction;
  1151. }
  1152. } else {
  1153. auto sourceId = getStringId(source);
  1154. sourceInst->addIdOperand(sourceId);
  1155. }
  1156. };
  1157. if (fileName == mainFileId) {
  1158. processDebugSource(sourceText);
  1159. } else {
  1160. auto incItr = includeFiles.find(fileName);
  1161. if (incItr != includeFiles.end()) {
  1162. processDebugSource(*incItr->second);
  1163. } else {
  1164. // We omit the optional source text item if not available in glslang
  1165. }
  1166. }
  1167. }
  1168. debugSourceId[fileName] = resultId;
  1169. return resultId;
  1170. }
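// Worked example (not part of the original source): with maxWordCount = 0xFFFF and
// opSourceWordCount = 4, each chunk holds 4 * (65535 - 4) - 1 = 262123 bytes, so a 600 KB
// source string becomes one DebugSource text operand plus two DebugSourceContinued instructions.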
  1171. Id Builder::makeDebugCompilationUnit() {
  1172. if (nonSemanticShaderCompilationUnitId != 0)
  1173. return nonSemanticShaderCompilationUnitId;
  1174. spv::Id resultId = getUniqueId();
  1175. Instruction* sourceInst = new Instruction(resultId, makeVoidType(), Op::OpExtInst);
  1176. sourceInst->reserveOperands(6);
  1177. sourceInst->addIdOperand(nonSemanticShaderDebugInfo);
  1178. sourceInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugCompilationUnit);
  1179. sourceInst->addIdOperand(makeUintConstant(1)); // TODO(greg-lunarg): Get rid of magic number
  1180. sourceInst->addIdOperand(makeUintConstant(4)); // TODO(greg-lunarg): Get rid of magic number
  1181. sourceInst->addIdOperand(makeDebugSource(mainFileId));
  1182. sourceInst->addIdOperand(makeUintConstant(sourceLang));
  1183. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(sourceInst));
  1184. module.mapInstruction(sourceInst);
  1185. nonSemanticShaderCompilationUnitId = resultId;
1186. // We can reasonably assume that makeDebugCompilationUnit will be called before any use of
1187. // the debug-scope stack; function scopes and lexical scopes are pushed afterward.
  1188. assert(currentDebugScopeId.empty());
  1189. currentDebugScopeId.push(nonSemanticShaderCompilationUnitId);
  1190. return resultId;
  1191. }
  1192. Id Builder::createDebugGlobalVariable(Id const type, char const*const name, Id const variable)
  1193. {
  1194. assert(type != 0);
  1195. Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1196. inst->reserveOperands(11);
  1197. inst->addIdOperand(nonSemanticShaderDebugInfo);
  1198. inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugGlobalVariable);
  1199. inst->addIdOperand(getStringId(name)); // name id
  1200. inst->addIdOperand(type); // type id
  1201. inst->addIdOperand(makeDebugSource(currentFileId)); // source id
  1202. inst->addIdOperand(makeUintConstant(currentLine)); // line id TODO: currentLine always zero?
  1203. inst->addIdOperand(makeUintConstant(0)); // TODO: column id
  1204. inst->addIdOperand(makeDebugCompilationUnit()); // scope id
  1205. inst->addIdOperand(getStringId(name)); // linkage name id
  1206. inst->addIdOperand(variable); // variable id
  1207. inst->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsDefinition)); // flags id
  1208. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
  1209. module.mapInstruction(inst);
  1210. return inst->getResultId();
  1211. }
  1212. Id Builder::createDebugLocalVariable(Id type, char const*const name, size_t const argNumber)
  1213. {
  1214. assert(name != nullptr);
  1215. assert(!currentDebugScopeId.empty());
  1216. Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1217. inst->reserveOperands(9);
  1218. inst->addIdOperand(nonSemanticShaderDebugInfo);
  1219. inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugLocalVariable);
  1220. inst->addIdOperand(getStringId(name)); // name id
  1221. inst->addIdOperand(type); // type id
  1222. inst->addIdOperand(makeDebugSource(currentFileId)); // source id
  1223. inst->addIdOperand(makeUintConstant(currentLine)); // line id
  1224. inst->addIdOperand(makeUintConstant(0)); // TODO: column id
  1225. inst->addIdOperand(currentDebugScopeId.top()); // scope id
  1226. inst->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsLocal)); // flags id
  1227. if(argNumber != 0) {
  1228. inst->addIdOperand(makeUintConstant(argNumber));
  1229. }
  1230. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
  1231. module.mapInstruction(inst);
  1232. return inst->getResultId();
  1233. }
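// Illustrative sketch (not part of the original source); identifiers are assumptions:
//
//     Id dbgParam = createDebugLocalVariable(dbgF32, "x", 1);    // parameter: arg number 1 is appended
//     Id dbgLocal = createDebugLocalVariable(dbgF32, "tmp", 0);  // plain local: no arg-number operand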
  1234. Id Builder::makeDebugExpression()
  1235. {
  1236. if (debugExpression != 0)
  1237. return debugExpression;
  1238. Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1239. inst->reserveOperands(2);
  1240. inst->addIdOperand(nonSemanticShaderDebugInfo);
  1241. inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugExpression);
  1242. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
  1243. module.mapInstruction(inst);
  1244. debugExpression = inst->getResultId();
  1245. return debugExpression;
  1246. }
  1247. Id Builder::makeDebugDeclare(Id const debugLocalVariable, Id const pointer)
  1248. {
  1249. Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1250. inst->reserveOperands(5);
  1251. inst->addIdOperand(nonSemanticShaderDebugInfo);
  1252. inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugDeclare);
  1253. inst->addIdOperand(debugLocalVariable); // debug local variable id
  1254. inst->addIdOperand(pointer); // pointer to local variable id
  1255. inst->addIdOperand(makeDebugExpression()); // expression id
  1256. addInstruction(std::unique_ptr<Instruction>(inst));
  1257. return inst->getResultId();
  1258. }
  1259. Id Builder::makeDebugValue(Id const debugLocalVariable, Id const value)
  1260. {
  1261. Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1262. inst->reserveOperands(5);
  1263. inst->addIdOperand(nonSemanticShaderDebugInfo);
  1264. inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugValue);
  1265. inst->addIdOperand(debugLocalVariable); // debug local variable id
  1266. inst->addIdOperand(value); // value of local variable id
  1267. inst->addIdOperand(makeDebugExpression()); // expression id
  1268. addInstruction(std::unique_ptr<Instruction>(inst));
  1269. return inst->getResultId();
  1270. }
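// Illustrative sketch (not part of the original source): a local variable is typically announced
// by pairing the calls above, both of which reuse the shared empty DebugExpression:
//
//     Id dbgVar = createDebugLocalVariable(dbgType, "v", 0);   // identifiers are assumptions
//     makeDebugDeclare(dbgVar, pointerToV);                    // emits DebugDeclare at the build point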
  1271. Id Builder::makeAccelerationStructureType()
  1272. {
  1273. Instruction *type;
  1274. if (groupedTypes[enumCast(Op::OpTypeAccelerationStructureKHR)].size() == 0) {
  1275. type = new Instruction(getUniqueId(), NoType, Op::OpTypeAccelerationStructureKHR);
  1276. groupedTypes[enumCast(Op::OpTypeAccelerationStructureKHR)].push_back(type);
  1277. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1278. module.mapInstruction(type);
  1279. if (emitNonSemanticShaderDebugInfo) {
  1280. spv::Id debugType = makeCompositeDebugType({}, "accelerationStructure", NonSemanticShaderDebugInfo100Structure, true);
  1281. debugId[type->getResultId()] = debugType;
  1282. }
  1283. } else {
  1284. type = groupedTypes[enumCast(Op::OpTypeAccelerationStructureKHR)].back();
  1285. }
  1286. return type->getResultId();
  1287. }
  1288. Id Builder::makeRayQueryType()
  1289. {
  1290. Instruction *type;
  1291. if (groupedTypes[enumCast(Op::OpTypeRayQueryKHR)].size() == 0) {
  1292. type = new Instruction(getUniqueId(), NoType, Op::OpTypeRayQueryKHR);
  1293. groupedTypes[enumCast(Op::OpTypeRayQueryKHR)].push_back(type);
  1294. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1295. module.mapInstruction(type);
  1296. if (emitNonSemanticShaderDebugInfo) {
  1297. spv::Id debugType = makeCompositeDebugType({}, "rayQuery", NonSemanticShaderDebugInfo100Structure, true);
  1298. debugId[type->getResultId()] = debugType;
  1299. }
  1300. } else {
  1301. type = groupedTypes[enumCast(Op::OpTypeRayQueryKHR)].back();
  1302. }
  1303. return type->getResultId();
  1304. }
  1305. Id Builder::makeHitObjectNVType()
  1306. {
  1307. Instruction *type;
  1308. if (groupedTypes[enumCast(Op::OpTypeHitObjectNV)].size() == 0) {
  1309. type = new Instruction(getUniqueId(), NoType, Op::OpTypeHitObjectNV);
  1310. groupedTypes[enumCast(Op::OpTypeHitObjectNV)].push_back(type);
  1311. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1312. module.mapInstruction(type);
  1313. } else {
  1314. type = groupedTypes[enumCast(Op::OpTypeHitObjectNV)].back();
  1315. }
  1316. return type->getResultId();
  1317. }
  1318. Id Builder::getDerefTypeId(Id resultId) const
  1319. {
  1320. Id typeId = getTypeId(resultId);
  1321. assert(isPointerType(typeId));
  1322. return module.getInstruction(typeId)->getIdOperand(1);
  1323. }
  1324. Op Builder::getMostBasicTypeClass(Id typeId) const
  1325. {
  1326. Instruction* instr = module.getInstruction(typeId);
  1327. Op typeClass = instr->getOpCode();
  1328. switch (typeClass)
  1329. {
  1330. case Op::OpTypeVector:
  1331. case Op::OpTypeMatrix:
  1332. case Op::OpTypeArray:
  1333. case Op::OpTypeRuntimeArray:
  1334. return getMostBasicTypeClass(instr->getIdOperand(0));
  1335. case Op::OpTypePointer:
  1336. return getMostBasicTypeClass(instr->getIdOperand(1));
  1337. default:
  1338. return typeClass;
  1339. }
  1340. }
  1341. unsigned int Builder::getNumTypeConstituents(Id typeId) const
  1342. {
  1343. Instruction* instr = module.getInstruction(typeId);
  1344. switch (instr->getOpCode())
  1345. {
  1346. case Op::OpTypeBool:
  1347. case Op::OpTypeInt:
  1348. case Op::OpTypeFloat:
  1349. case Op::OpTypePointer:
  1350. return 1;
  1351. case Op::OpTypeVector:
  1352. case Op::OpTypeMatrix:
  1353. return instr->getImmediateOperand(1);
  1354. case Op::OpTypeCooperativeVectorNV:
  1355. case Op::OpTypeArray:
  1356. {
  1357. Id lengthId = instr->getIdOperand(1);
  1358. return module.getInstruction(lengthId)->getImmediateOperand(0);
  1359. }
  1360. case Op::OpTypeStruct:
  1361. return instr->getNumOperands();
  1362. case Op::OpTypeCooperativeMatrixKHR:
  1363. case Op::OpTypeCooperativeMatrixNV:
  1364. // has only one constituent when used with OpCompositeConstruct.
  1365. return 1;
  1366. default:
  1367. assert(0);
  1368. return 1;
  1369. }
  1370. }
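// Illustrative expectations (not part of the original source), matching the cases above;
// identifiers are assumptions:
//
//     getNumTypeConstituents(makeVectorType(f32, 4)) == 4   // from the vector's literal count
//     getNumTypeConstituents(structTypeWith3Members) == 3   // one per member operand
//     getNumTypeConstituents(cooperativeMatrixType)  == 1   // single constituent for construction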
1371. // Return the lowest-level scalar type that a homogeneous composite is made out of.
1372. // Typically, this is just to find out whether something is made out of ints or floats.
1373. // However, it can also return a structure type, if, say, the composite is an array of structures.
  1374. Id Builder::getScalarTypeId(Id typeId) const
  1375. {
  1376. Instruction* instr = module.getInstruction(typeId);
  1377. Op typeClass = instr->getOpCode();
  1378. switch (typeClass)
  1379. {
  1380. case Op::OpTypeVoid:
  1381. case Op::OpTypeBool:
  1382. case Op::OpTypeInt:
  1383. case Op::OpTypeFloat:
  1384. case Op::OpTypeStruct:
  1385. return instr->getResultId();
  1386. case Op::OpTypeVector:
  1387. case Op::OpTypeMatrix:
  1388. case Op::OpTypeArray:
  1389. case Op::OpTypeRuntimeArray:
  1390. case Op::OpTypePointer:
  1391. case Op::OpTypeCooperativeVectorNV:
  1392. return getScalarTypeId(getContainedTypeId(typeId));
  1393. default:
  1394. assert(0);
  1395. return NoResult;
  1396. }
  1397. }
  1398. // Return the type of 'member' of a composite.
  1399. Id Builder::getContainedTypeId(Id typeId, int member) const
  1400. {
  1401. Instruction* instr = module.getInstruction(typeId);
  1402. Op typeClass = instr->getOpCode();
  1403. switch (typeClass)
  1404. {
  1405. case Op::OpTypeVector:
  1406. case Op::OpTypeMatrix:
  1407. case Op::OpTypeArray:
  1408. case Op::OpTypeRuntimeArray:
  1409. case Op::OpTypeCooperativeMatrixKHR:
  1410. case Op::OpTypeCooperativeMatrixNV:
  1411. case Op::OpTypeCooperativeVectorNV:
  1412. return instr->getIdOperand(0);
  1413. case Op::OpTypePointer:
  1414. return instr->getIdOperand(1);
  1415. case Op::OpTypeStruct:
  1416. return instr->getIdOperand(member);
  1417. default:
  1418. assert(0);
  1419. return NoResult;
  1420. }
  1421. }
  1422. // Figure out the final resulting type of the access chain.
  1423. Id Builder::getResultingAccessChainType() const
  1424. {
  1425. assert(accessChain.base != NoResult);
  1426. Id typeId = getTypeId(accessChain.base);
  1427. assert(isPointerType(typeId));
  1428. typeId = getContainedTypeId(typeId);
  1429. for (int i = 0; i < (int)accessChain.indexChain.size(); ++i) {
  1430. if (isStructType(typeId)) {
  1431. assert(isConstantScalar(accessChain.indexChain[i]));
  1432. typeId = getContainedTypeId(typeId, getConstantScalar(accessChain.indexChain[i]));
  1433. } else
  1434. typeId = getContainedTypeId(typeId, accessChain.indexChain[i]);
  1435. }
  1436. return typeId;
  1437. }
  1438. // Return the immediately contained type of a given composite type.
  1439. Id Builder::getContainedTypeId(Id typeId) const
  1440. {
  1441. return getContainedTypeId(typeId, 0);
  1442. }
  1443. // Returns true if 'typeId' is or contains a scalar type declared with 'typeOp'
  1444. // of width 'width'. The 'width' is only consumed for int and float types.
  1445. // Returns false otherwise.
  1446. bool Builder::containsType(Id typeId, spv::Op typeOp, unsigned int width) const
  1447. {
  1448. const Instruction& instr = *module.getInstruction(typeId);
  1449. Op typeClass = instr.getOpCode();
  1450. switch (typeClass)
  1451. {
  1452. case Op::OpTypeInt:
  1453. case Op::OpTypeFloat:
  1454. return typeClass == typeOp && instr.getImmediateOperand(0) == width;
  1455. case Op::OpTypeStruct:
  1456. for (int m = 0; m < instr.getNumOperands(); ++m) {
  1457. if (containsType(instr.getIdOperand(m), typeOp, width))
  1458. return true;
  1459. }
  1460. return false;
  1461. case Op::OpTypePointer:
  1462. return false;
  1463. case Op::OpTypeVector:
  1464. case Op::OpTypeMatrix:
  1465. case Op::OpTypeArray:
  1466. case Op::OpTypeRuntimeArray:
  1467. return containsType(getContainedTypeId(typeId), typeOp, width);
  1468. default:
  1469. return typeClass == typeOp;
  1470. }
  1471. }
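// Illustrative expectations (not part of the original source); identifiers are assumptions:
//
//     containsType(structWithHalfMember, Op::OpTypeFloat, 16)   // true: recurses into members
//     containsType(ptrToHalf,            Op::OpTypeFloat, 16)   // false: pointers are not traversed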
1472. // Return true if the type is a pointer to PhysicalStorageBufferEXT, or is an array or
1473. // struct that contains such a pointer. These require restrict/aliased decorations.
  1474. bool Builder::containsPhysicalStorageBufferOrArray(Id typeId) const
  1475. {
  1476. const Instruction& instr = *module.getInstruction(typeId);
  1477. Op typeClass = instr.getOpCode();
  1478. switch (typeClass)
  1479. {
  1480. case Op::OpTypePointer:
  1481. return getTypeStorageClass(typeId) == StorageClass::PhysicalStorageBufferEXT;
  1482. case Op::OpTypeArray:
  1483. return containsPhysicalStorageBufferOrArray(getContainedTypeId(typeId));
  1484. case Op::OpTypeStruct:
  1485. for (int m = 0; m < instr.getNumOperands(); ++m) {
  1486. if (containsPhysicalStorageBufferOrArray(instr.getIdOperand(m)))
  1487. return true;
  1488. }
  1489. return false;
  1490. default:
  1491. return false;
  1492. }
  1493. }
  1494. // See if a scalar constant of this type has already been created, so it
  1495. // can be reused rather than duplicated. (Required by the specification).
  1496. Id Builder::findScalarConstant(Op typeClass, Op opcode, Id typeId, unsigned value)
  1497. {
  1498. Instruction* constant;
  1499. for (int i = 0; i < (int)groupedConstants[enumCast(typeClass)].size(); ++i) {
  1500. constant = groupedConstants[enumCast(typeClass)][i];
  1501. if (constant->getOpCode() == opcode &&
  1502. constant->getTypeId() == typeId &&
  1503. constant->getImmediateOperand(0) == value)
  1504. return constant->getResultId();
  1505. }
  1506. return 0;
  1507. }
  1508. // Version of findScalarConstant (see above) for scalars that take two operands (e.g. a 'double' or 'int64').
  1509. Id Builder::findScalarConstant(Op typeClass, Op opcode, Id typeId, unsigned v1, unsigned v2)
  1510. {
  1511. Instruction* constant;
  1512. for (int i = 0; i < (int)groupedConstants[enumCast(typeClass)].size(); ++i) {
  1513. constant = groupedConstants[enumCast(typeClass)][i];
  1514. if (constant->getOpCode() == opcode &&
  1515. constant->getTypeId() == typeId &&
  1516. constant->getImmediateOperand(0) == v1 &&
  1517. constant->getImmediateOperand(1) == v2)
  1518. return constant->getResultId();
  1519. }
  1520. return 0;
  1521. }
  1522. // Return true if consuming 'opcode' means consuming a constant.
  1523. // "constant" here means after final transform to executable code,
  1524. // the value consumed will be a constant, so includes specialization.
  1525. bool Builder::isConstantOpCode(Op opcode) const
  1526. {
  1527. switch (opcode) {
  1528. case Op::OpUndef:
  1529. case Op::OpConstantTrue:
  1530. case Op::OpConstantFalse:
  1531. case Op::OpConstant:
  1532. case Op::OpConstantComposite:
  1533. case Op::OpConstantCompositeReplicateEXT:
  1534. case Op::OpConstantSampler:
  1535. case Op::OpConstantNull:
  1536. case Op::OpSpecConstantTrue:
  1537. case Op::OpSpecConstantFalse:
  1538. case Op::OpSpecConstant:
  1539. case Op::OpSpecConstantComposite:
  1540. case Op::OpSpecConstantCompositeReplicateEXT:
  1541. case Op::OpSpecConstantOp:
  1542. return true;
  1543. default:
  1544. return false;
  1545. }
  1546. }
  1547. // Return true if consuming 'opcode' means consuming a specialization constant.
  1548. bool Builder::isSpecConstantOpCode(Op opcode) const
  1549. {
  1550. switch (opcode) {
  1551. case Op::OpSpecConstantTrue:
  1552. case Op::OpSpecConstantFalse:
  1553. case Op::OpSpecConstant:
  1554. case Op::OpSpecConstantComposite:
  1555. case Op::OpSpecConstantOp:
  1556. case Op::OpSpecConstantCompositeReplicateEXT:
  1557. return true;
  1558. default:
  1559. return false;
  1560. }
  1561. }
  1562. Id Builder::makeNullConstant(Id typeId)
  1563. {
  1564. Instruction* constant;
  1565. // See if we already made it.
  1566. Id existing = NoResult;
  1567. for (int i = 0; i < (int)nullConstants.size(); ++i) {
  1568. constant = nullConstants[i];
  1569. if (constant->getTypeId() == typeId)
  1570. existing = constant->getResultId();
  1571. }
  1572. if (existing != NoResult)
  1573. return existing;
  1574. // Make it
  1575. Instruction* c = new Instruction(getUniqueId(), typeId, Op::OpConstantNull);
  1576. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1577. nullConstants.push_back(c);
  1578. module.mapInstruction(c);
  1579. return c->getResultId();
  1580. }
  1581. Id Builder::makeBoolConstant(bool b, bool specConstant)
  1582. {
  1583. Id typeId = makeBoolType();
  1584. Instruction* constant;
  1585. Op opcode = specConstant ? (b ? Op::OpSpecConstantTrue : Op::OpSpecConstantFalse) : (b ? Op::OpConstantTrue : Op::OpConstantFalse);
  1586. // See if we already made it. Applies only to regular constants, because specialization constants
  1587. // must remain distinct for the purpose of applying a SpecId decoration.
  1588. if (! specConstant) {
  1589. Id existing = 0;
  1590. for (int i = 0; i < (int)groupedConstants[enumCast(Op::OpTypeBool)].size(); ++i) {
  1591. constant = groupedConstants[enumCast(Op::OpTypeBool)][i];
  1592. if (constant->getTypeId() == typeId && constant->getOpCode() == opcode)
  1593. existing = constant->getResultId();
  1594. }
  1595. if (existing)
  1596. return existing;
  1597. }
  1598. // Make it
  1599. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1600. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1601. groupedConstants[enumCast(Op::OpTypeBool)].push_back(c);
  1602. module.mapInstruction(c);
  1603. return c->getResultId();
  1604. }
  1605. Id Builder::makeIntConstant(Id typeId, unsigned value, bool specConstant)
  1606. {
  1607. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1608. // See if we already made it. Applies only to regular constants, because specialization constants
  1609. // must remain distinct for the purpose of applying a SpecId decoration.
  1610. if (! specConstant) {
  1611. Id existing = findScalarConstant(Op::OpTypeInt, opcode, typeId, value);
  1612. if (existing)
  1613. return existing;
  1614. }
  1615. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1616. c->addImmediateOperand(value);
  1617. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1618. groupedConstants[enumCast(Op::OpTypeInt)].push_back(c);
  1619. module.mapInstruction(c);
  1620. return c->getResultId();
  1621. }
  1622. Id Builder::makeInt64Constant(Id typeId, unsigned long long value, bool specConstant)
  1623. {
  1624. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1625. unsigned op1 = value & 0xFFFFFFFF;
  1626. unsigned op2 = value >> 32;
  1627. // See if we already made it. Applies only to regular constants, because specialization constants
  1628. // must remain distinct for the purpose of applying a SpecId decoration.
  1629. if (! specConstant) {
  1630. Id existing = findScalarConstant(Op::OpTypeInt, opcode, typeId, op1, op2);
  1631. if (existing)
  1632. return existing;
  1633. }
  1634. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1635. c->reserveOperands(2);
  1636. c->addImmediateOperand(op1);
  1637. c->addImmediateOperand(op2);
  1638. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1639. groupedConstants[enumCast(Op::OpTypeInt)].push_back(c);
  1640. module.mapInstruction(c);
  1641. return c->getResultId();
  1642. }
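// Worked example (not part of the original source): the 64-bit value is split into two 32-bit
// words, low word first, e.g. value 0x100000001 gives op1 = 0x00000001 and op2 = 0x00000001,
// matching SPIR-V's low-order-word-first layout for multi-word literals.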
  1643. Id Builder::makeFloatConstant(float f, bool specConstant)
  1644. {
  1645. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1646. Id typeId = makeFloatType(32);
  1647. union { float fl; unsigned int ui; } u;
  1648. u.fl = f;
  1649. unsigned value = u.ui;
  1650. // See if we already made it. Applies only to regular constants, because specialization constants
  1651. // must remain distinct for the purpose of applying a SpecId decoration.
  1652. if (! specConstant) {
  1653. Id existing = findScalarConstant(Op::OpTypeFloat, opcode, typeId, value);
  1654. if (existing)
  1655. return existing;
  1656. }
  1657. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1658. c->addImmediateOperand(value);
  1659. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1660. groupedConstants[enumCast(Op::OpTypeFloat)].push_back(c);
  1661. module.mapInstruction(c);
  1662. return c->getResultId();
  1663. }
  1664. Id Builder::makeDoubleConstant(double d, bool specConstant)
  1665. {
  1666. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1667. Id typeId = makeFloatType(64);
  1668. union { double db; unsigned long long ull; } u;
  1669. u.db = d;
  1670. unsigned long long value = u.ull;
  1671. unsigned op1 = value & 0xFFFFFFFF;
  1672. unsigned op2 = value >> 32;
  1673. // See if we already made it. Applies only to regular constants, because specialization constants
  1674. // must remain distinct for the purpose of applying a SpecId decoration.
  1675. if (! specConstant) {
  1676. Id existing = findScalarConstant(Op::OpTypeFloat, opcode, typeId, op1, op2);
  1677. if (existing)
  1678. return existing;
  1679. }
  1680. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1681. c->reserveOperands(2);
  1682. c->addImmediateOperand(op1);
  1683. c->addImmediateOperand(op2);
  1684. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1685. groupedConstants[enumCast(Op::OpTypeFloat)].push_back(c);
  1686. module.mapInstruction(c);
  1687. return c->getResultId();
  1688. }
  1689. Id Builder::makeFloat16Constant(float f16, bool specConstant)
  1690. {
  1691. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1692. Id typeId = makeFloatType(16);
  1693. spvutils::HexFloat<spvutils::FloatProxy<float>> fVal(f16);
  1694. spvutils::HexFloat<spvutils::FloatProxy<spvutils::Float16>> f16Val(0);
  1695. fVal.castTo(f16Val, spvutils::kRoundToZero);
  1696. unsigned value = f16Val.value().getAsFloat().get_value();
  1697. // See if we already made it. Applies only to regular constants, because specialization constants
  1698. // must remain distinct for the purpose of applying a SpecId decoration.
  1699. if (!specConstant) {
  1700. Id existing = findScalarConstant(Op::OpTypeFloat, opcode, typeId, value);
  1701. if (existing)
  1702. return existing;
  1703. }
  1704. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1705. c->addImmediateOperand(value);
  1706. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1707. groupedConstants[enumCast(Op::OpTypeFloat)].push_back(c);
  1708. module.mapInstruction(c);
  1709. return c->getResultId();
  1710. }
  1711. Id Builder::makeBFloat16Constant(float bf16, bool specConstant)
  1712. {
  1713. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1714. Id typeId = makeBFloat16Type();
  1715. union {
  1716. float f;
  1717. uint32_t u;
  1718. } un;
  1719. un.f = bf16;
1720. // Take the high 16 bits of the fp32 value. This is effectively round-toward-zero, other than
1721. // for certain NaNs, whose significand bits may be truncated away (possibly yielding an infinity).
  1721. unsigned value = un.u >> 16;
  1722. // See if we already made it. Applies only to regular constants, because specialization constants
  1723. // must remain distinct for the purpose of applying a SpecId decoration.
  1724. if (!specConstant) {
  1725. Id existing = findScalarConstant(Op::OpTypeFloat, opcode, typeId, value);
  1726. if (existing)
  1727. return existing;
  1728. }
  1729. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1730. c->addImmediateOperand(value);
  1731. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1732. groupedConstants[enumCast(Op::OpTypeFloat)].push_back(c);
  1733. module.mapInstruction(c);
  1734. return c->getResultId();
  1735. }
  1736. Id Builder::makeFloatE5M2Constant(float fe5m2, bool specConstant)
  1737. {
  1738. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1739. Id typeId = makeFloatE5M2Type();
  1740. spvutils::HexFloat<spvutils::FloatProxy<float>> fVal(fe5m2);
  1741. spvutils::HexFloat<spvutils::FloatProxy<spvutils::FloatE5M2>> fe5m2Val(0);
  1742. fVal.castTo(fe5m2Val, spvutils::kRoundToZero);
  1743. unsigned value = fe5m2Val.value().getAsFloat().get_value();
  1744. // See if we already made it. Applies only to regular constants, because specialization constants
  1745. // must remain distinct for the purpose of applying a SpecId decoration.
  1746. if (!specConstant) {
  1747. Id existing = findScalarConstant(Op::OpTypeFloat, opcode, typeId, value);
  1748. if (existing)
  1749. return existing;
  1750. }
  1751. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1752. c->addImmediateOperand(value);
  1753. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1754. groupedConstants[enumCast(Op::OpTypeFloat)].push_back(c);
  1755. module.mapInstruction(c);
  1756. return c->getResultId();
  1757. }
  1758. Id Builder::makeFloatE4M3Constant(float fe4m3, bool specConstant)
  1759. {
  1760. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1761. Id typeId = makeFloatE4M3Type();
  1762. spvutils::HexFloat<spvutils::FloatProxy<float>> fVal(fe4m3);
  1763. spvutils::HexFloat<spvutils::FloatProxy<spvutils::FloatE4M3>> fe4m3Val(0);
  1764. fVal.castTo(fe4m3Val, spvutils::kRoundToZero);
  1765. unsigned value = fe4m3Val.value().getAsFloat().get_value();
  1766. // See if we already made it. Applies only to regular constants, because specialization constants
  1767. // must remain distinct for the purpose of applying a SpecId decoration.
  1768. if (!specConstant) {
  1769. Id existing = findScalarConstant(Op::OpTypeFloat, opcode, typeId, value);
  1770. if (existing)
  1771. return existing;
  1772. }
  1773. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1774. c->addImmediateOperand(value);
  1775. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1776. groupedConstants[enumCast(Op::OpTypeFloat)].push_back(c);
  1777. module.mapInstruction(c);
  1778. return c->getResultId();
  1779. }
  1780. Id Builder::makeFpConstant(Id type, double d, bool specConstant)
  1781. {
  1782. const int width = getScalarTypeWidth(type);
  1783. assert(isFloatType(type));
  1784. switch (width) {
  1785. case 16:
  1786. return makeFloat16Constant((float)d, specConstant);
  1787. case 32:
  1788. return makeFloatConstant((float)d, specConstant);
  1789. case 64:
  1790. return makeDoubleConstant(d, specConstant);
  1791. default:
  1792. break;
  1793. }
  1794. assert(false);
  1795. return NoResult;
  1796. }
  1797. Id Builder::importNonSemanticShaderDebugInfoInstructions()
  1798. {
1799. assert(emitNonSemanticShaderDebugInfo);
  1800. if(nonSemanticShaderDebugInfo == 0)
  1801. {
  1802. this->addExtension(spv::E_SPV_KHR_non_semantic_info);
  1803. nonSemanticShaderDebugInfo = this->import("NonSemantic.Shader.DebugInfo.100");
  1804. }
  1805. return nonSemanticShaderDebugInfo;
  1806. }
  1807. Id Builder::findCompositeConstant(Op typeClass, Op opcode, Id typeId, const std::vector<Id>& comps, size_t numMembers)
  1808. {
  1809. Instruction* constant = nullptr;
  1810. bool found = false;
  1811. for (int i = 0; i < (int)groupedConstants[enumCast(typeClass)].size(); ++i) {
  1812. constant = groupedConstants[enumCast(typeClass)][i];
  1813. if (constant->getTypeId() != typeId)
  1814. continue;
  1815. if (constant->getOpCode() != opcode) {
  1816. continue;
  1817. }
  1818. if (constant->getNumOperands() != (int)numMembers)
  1819. continue;
  1820. // same contents?
  1821. bool mismatch = false;
  1822. for (int op = 0; op < constant->getNumOperands(); ++op) {
  1823. if (constant->getIdOperand(op) != comps[op]) {
  1824. mismatch = true;
  1825. break;
  1826. }
  1827. }
  1828. if (! mismatch) {
  1829. found = true;
  1830. break;
  1831. }
  1832. }
  1833. return found ? constant->getResultId() : NoResult;
  1834. }
  1835. Id Builder::findStructConstant(Id typeId, const std::vector<Id>& comps)
  1836. {
  1837. Instruction* constant = nullptr;
  1838. bool found = false;
  1839. for (int i = 0; i < (int)groupedStructConstants[typeId].size(); ++i) {
  1840. constant = groupedStructConstants[typeId][i];
  1841. // same contents?
  1842. bool mismatch = false;
  1843. for (int op = 0; op < constant->getNumOperands(); ++op) {
  1844. if (constant->getIdOperand(op) != comps[op]) {
  1845. mismatch = true;
  1846. break;
  1847. }
  1848. }
  1849. if (! mismatch) {
  1850. found = true;
  1851. break;
  1852. }
  1853. }
  1854. return found ? constant->getResultId() : NoResult;
  1855. }
  1856. // Comments in header
  1857. Id Builder::makeCompositeConstant(Id typeId, const std::vector<Id>& members, bool specConstant)
  1858. {
  1859. assert(typeId);
  1860. Op typeClass = getTypeClass(typeId);
  1861. bool replicate = false;
  1862. size_t numMembers = members.size();
  1863. if (useReplicatedComposites || typeClass == Op::OpTypeCooperativeVectorNV) {
  1864. // use replicate if all members are the same
  1865. replicate = numMembers > 0 &&
  1866. std::equal(members.begin() + 1, members.end(), members.begin());
  1867. if (replicate) {
  1868. numMembers = 1;
  1869. addCapability(spv::Capability::ReplicatedCompositesEXT);
  1870. addExtension(spv::E_SPV_EXT_replicated_composites);
  1871. }
  1872. }
  1873. Op opcode = replicate ?
  1874. (specConstant ? Op::OpSpecConstantCompositeReplicateEXT : Op::OpConstantCompositeReplicateEXT) :
  1875. (specConstant ? Op::OpSpecConstantComposite : Op::OpConstantComposite);
  1876. switch (typeClass) {
  1877. case Op::OpTypeVector:
  1878. case Op::OpTypeArray:
  1879. case Op::OpTypeMatrix:
  1880. case Op::OpTypeCooperativeMatrixKHR:
  1881. case Op::OpTypeCooperativeMatrixNV:
  1882. case Op::OpTypeCooperativeVectorNV:
  1883. if (! specConstant) {
  1884. Id existing = findCompositeConstant(typeClass, opcode, typeId, members, numMembers);
  1885. if (existing)
  1886. return existing;
  1887. }
  1888. break;
  1889. case Op::OpTypeStruct:
  1890. if (! specConstant) {
  1891. Id existing = findStructConstant(typeId, members);
  1892. if (existing)
  1893. return existing;
  1894. }
  1895. break;
  1896. default:
  1897. assert(0);
  1898. return makeFloatConstant(0.0);
  1899. }
  1900. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1901. c->reserveOperands(members.size());
  1902. for (size_t op = 0; op < numMembers; ++op)
  1903. c->addIdOperand(members[op]);
  1904. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1905. if (typeClass == Op::OpTypeStruct)
  1906. groupedStructConstants[typeId].push_back(c);
  1907. else
  1908. groupedConstants[enumCast(typeClass)].push_back(c);
  1909. module.mapInstruction(c);
  1910. return c->getResultId();
  1911. }
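// Illustrative sketch (not part of the original source); identifiers are assumptions:
//
//     // With useReplicatedComposites set and four identical members,
//     // e.g. members = {c1, c1, c1, c1} for a vec4 type:
//     //   -> OpConstantCompositeReplicateEXT %vec4 c1
//     // plus the ReplicatedCompositesEXT capability and SPV_EXT_replicated_composites extension.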
  1912. Instruction* Builder::addEntryPoint(ExecutionModel model, Function* function, const char* name)
  1913. {
  1914. Instruction* entryPoint = new Instruction(Op::OpEntryPoint);
  1915. entryPoint->reserveOperands(3);
  1916. entryPoint->addImmediateOperand(model);
  1917. entryPoint->addIdOperand(function->getId());
  1918. entryPoint->addStringOperand(name);
  1919. entryPoints.push_back(std::unique_ptr<Instruction>(entryPoint));
  1920. return entryPoint;
  1921. }
1922. // Currently relying on the fact that all 'value' operands of interest are small, non-negative values.
  1923. void Builder::addExecutionMode(Function* entryPoint, ExecutionMode mode, int value1, int value2, int value3)
  1924. {
  1925. // entryPoint can be null if we are in compile-only mode
  1926. if (!entryPoint)
  1927. return;
  1928. Instruction* instr = new Instruction(Op::OpExecutionMode);
  1929. instr->reserveOperands(3);
  1930. instr->addIdOperand(entryPoint->getId());
  1931. instr->addImmediateOperand(mode);
  1932. if (value1 >= 0)
  1933. instr->addImmediateOperand(value1);
  1934. if (value2 >= 0)
  1935. instr->addImmediateOperand(value2);
  1936. if (value3 >= 0)
  1937. instr->addImmediateOperand(value3);
  1938. executionModes.push_back(std::unique_ptr<Instruction>(instr));
  1939. }
  1940. void Builder::addExecutionMode(Function* entryPoint, ExecutionMode mode, const std::vector<unsigned>& literals)
  1941. {
  1942. // entryPoint can be null if we are in compile-only mode
  1943. if (!entryPoint)
  1944. return;
  1945. Instruction* instr = new Instruction(Op::OpExecutionMode);
  1946. instr->reserveOperands(literals.size() + 2);
  1947. instr->addIdOperand(entryPoint->getId());
  1948. instr->addImmediateOperand(mode);
  1949. for (auto literal : literals)
  1950. instr->addImmediateOperand(literal);
  1951. executionModes.push_back(std::unique_ptr<Instruction>(instr));
  1952. }
  1953. void Builder::addExecutionModeId(Function* entryPoint, ExecutionMode mode, const std::vector<Id>& operandIds)
  1954. {
  1955. // entryPoint can be null if we are in compile-only mode
  1956. if (!entryPoint)
  1957. return;
  1958. Instruction* instr = new Instruction(Op::OpExecutionModeId);
  1959. instr->reserveOperands(operandIds.size() + 2);
  1960. instr->addIdOperand(entryPoint->getId());
  1961. instr->addImmediateOperand(mode);
  1962. for (auto operandId : operandIds)
  1963. instr->addIdOperand(operandId);
  1964. executionModes.push_back(std::unique_ptr<Instruction>(instr));
  1965. }
  1966. void Builder::addName(Id id, const char* string)
  1967. {
  1968. Instruction* name = new Instruction(Op::OpName);
  1969. name->reserveOperands(2);
  1970. name->addIdOperand(id);
  1971. name->addStringOperand(string);
  1972. names.push_back(std::unique_ptr<Instruction>(name));
  1973. }
  1974. void Builder::addMemberName(Id id, int memberNumber, const char* string)
  1975. {
  1976. Instruction* name = new Instruction(Op::OpMemberName);
  1977. name->reserveOperands(3);
  1978. name->addIdOperand(id);
  1979. name->addImmediateOperand(memberNumber);
  1980. name->addStringOperand(string);
  1981. names.push_back(std::unique_ptr<Instruction>(name));
  1982. }
  1983. void Builder::addDecoration(Id id, Decoration decoration, int num)
  1984. {
  1985. if (decoration == spv::Decoration::Max)
  1986. return;
  1987. Instruction* dec = new Instruction(Op::OpDecorate);
  1988. dec->reserveOperands(2);
  1989. dec->addIdOperand(id);
  1990. dec->addImmediateOperand(decoration);
  1991. if (num >= 0)
  1992. dec->addImmediateOperand(num);
  1993. decorations.insert(std::unique_ptr<Instruction>(dec));
  1994. }
  1995. void Builder::addDecoration(Id id, Decoration decoration, const char* s)
  1996. {
  1997. if (decoration == spv::Decoration::Max)
  1998. return;
  1999. Instruction* dec = new Instruction(Op::OpDecorateString);
  2000. dec->reserveOperands(3);
  2001. dec->addIdOperand(id);
  2002. dec->addImmediateOperand(decoration);
  2003. dec->addStringOperand(s);
  2004. decorations.insert(std::unique_ptr<Instruction>(dec));
  2005. }
  2006. void Builder::addDecoration(Id id, Decoration decoration, const std::vector<unsigned>& literals)
  2007. {
  2008. if (decoration == spv::Decoration::Max)
  2009. return;
  2010. Instruction* dec = new Instruction(Op::OpDecorate);
  2011. dec->reserveOperands(literals.size() + 2);
  2012. dec->addIdOperand(id);
  2013. dec->addImmediateOperand(decoration);
  2014. for (auto literal : literals)
  2015. dec->addImmediateOperand(literal);
  2016. decorations.insert(std::unique_ptr<Instruction>(dec));
  2017. }
  2018. void Builder::addDecoration(Id id, Decoration decoration, const std::vector<const char*>& strings)
  2019. {
  2020. if (decoration == spv::Decoration::Max)
  2021. return;
  2022. Instruction* dec = new Instruction(Op::OpDecorateString);
  2023. dec->reserveOperands(strings.size() + 2);
  2024. dec->addIdOperand(id);
  2025. dec->addImmediateOperand(decoration);
  2026. for (auto string : strings)
  2027. dec->addStringOperand(string);
  2028. decorations.insert(std::unique_ptr<Instruction>(dec));
  2029. }
  2030. void Builder::addLinkageDecoration(Id id, const char* name, spv::LinkageType linkType) {
  2031. Instruction* dec = new Instruction(Op::OpDecorate);
  2032. dec->reserveOperands(4);
  2033. dec->addIdOperand(id);
  2034. dec->addImmediateOperand(spv::Decoration::LinkageAttributes);
  2035. dec->addStringOperand(name);
  2036. dec->addImmediateOperand(linkType);
  2037. decorations.insert(std::unique_ptr<Instruction>(dec));
  2038. }
  2039. void Builder::addDecorationId(Id id, Decoration decoration, Id idDecoration)
  2040. {
  2041. if (decoration == spv::Decoration::Max)
  2042. return;
  2043. Instruction* dec = new Instruction(Op::OpDecorateId);
  2044. dec->reserveOperands(3);
  2045. dec->addIdOperand(id);
  2046. dec->addImmediateOperand(decoration);
  2047. dec->addIdOperand(idDecoration);
  2048. decorations.insert(std::unique_ptr<Instruction>(dec));
  2049. }
  2050. void Builder::addDecorationId(Id id, Decoration decoration, const std::vector<Id>& operandIds)
  2051. {
  2052. if(decoration == spv::Decoration::Max)
  2053. return;
  2054. Instruction* dec = new Instruction(Op::OpDecorateId);
  2055. dec->reserveOperands(operandIds.size() + 2);
  2056. dec->addIdOperand(id);
  2057. dec->addImmediateOperand(decoration);
  2058. for (auto operandId : operandIds)
  2059. dec->addIdOperand(operandId);
  2060. decorations.insert(std::unique_ptr<Instruction>(dec));
  2061. }
  2062. void Builder::addMemberDecoration(Id id, unsigned int member, Decoration decoration, int num)
  2063. {
  2064. if (decoration == spv::Decoration::Max)
  2065. return;
  2066. Instruction* dec = new Instruction(Op::OpMemberDecorate);
  2067. dec->reserveOperands(3);
  2068. dec->addIdOperand(id);
  2069. dec->addImmediateOperand(member);
  2070. dec->addImmediateOperand(decoration);
  2071. if (num >= 0)
  2072. dec->addImmediateOperand(num);
  2073. decorations.insert(std::unique_ptr<Instruction>(dec));
  2074. }
  2075. void Builder::addMemberDecoration(Id id, unsigned int member, Decoration decoration, const char *s)
  2076. {
  2077. if (decoration == spv::Decoration::Max)
  2078. return;
  2079. Instruction* dec = new Instruction(Op::OpMemberDecorateStringGOOGLE);
  2080. dec->reserveOperands(4);
  2081. dec->addIdOperand(id);
  2082. dec->addImmediateOperand(member);
  2083. dec->addImmediateOperand(decoration);
  2084. dec->addStringOperand(s);
  2085. decorations.insert(std::unique_ptr<Instruction>(dec));
  2086. }
  2087. void Builder::addMemberDecoration(Id id, unsigned int member, Decoration decoration, const std::vector<unsigned>& literals)
  2088. {
  2089. if (decoration == spv::Decoration::Max)
  2090. return;
  2091. Instruction* dec = new Instruction(Op::OpMemberDecorate);
  2092. dec->reserveOperands(literals.size() + 3);
  2093. dec->addIdOperand(id);
  2094. dec->addImmediateOperand(member);
  2095. dec->addImmediateOperand(decoration);
  2096. for (auto literal : literals)
  2097. dec->addImmediateOperand(literal);
  2098. decorations.insert(std::unique_ptr<Instruction>(dec));
  2099. }
  2100. void Builder::addMemberDecoration(Id id, unsigned int member, Decoration decoration, const std::vector<const char*>& strings)
  2101. {
  2102. if (decoration == spv::Decoration::Max)
  2103. return;
  2104. Instruction* dec = new Instruction(Op::OpMemberDecorateString);
  2105. dec->reserveOperands(strings.size() + 3);
  2106. dec->addIdOperand(id);
  2107. dec->addImmediateOperand(member);
  2108. dec->addImmediateOperand(decoration);
  2109. for (auto string : strings)
  2110. dec->addStringOperand(string);
  2111. decorations.insert(std::unique_ptr<Instruction>(dec));
  2112. }
  2113. void Builder::addInstruction(std::unique_ptr<Instruction> inst) {
2114. // Phis must appear first in their block; don't insert line-tracking instructions
2115. // in front of them, just add the OpPhi and return.
  2116. if (inst->getOpCode() == Op::OpPhi) {
  2117. buildPoint->addInstruction(std::move(inst));
  2118. return;
  2119. }
  2120. // Optionally insert OpDebugScope
  2121. if (emitNonSemanticShaderDebugInfo && dirtyScopeTracker) {
  2122. if (buildPoint->updateDebugScope(currentDebugScopeId.top())) {
  2123. auto scopeInst = std::make_unique<Instruction>(getUniqueId(), makeVoidType(), Op::OpExtInst);
  2124. scopeInst->reserveOperands(3);
  2125. scopeInst->addIdOperand(nonSemanticShaderDebugInfo);
  2126. scopeInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugScope);
  2127. scopeInst->addIdOperand(currentDebugScopeId.top());
  2128. buildPoint->addInstruction(std::move(scopeInst));
  2129. }
  2130. dirtyScopeTracker = false;
  2131. }
  2132. // Insert OpLine/OpDebugLine if the debug source location has changed
  2133. if (trackDebugInfo && dirtyLineTracker) {
  2134. if (buildPoint->updateDebugSourceLocation(currentLine, 0, currentFileId)) {
  2135. if (emitSpirvDebugInfo) {
  2136. auto lineInst = std::make_unique<Instruction>(Op::OpLine);
  2137. lineInst->reserveOperands(3);
  2138. lineInst->addIdOperand(currentFileId);
  2139. lineInst->addImmediateOperand(currentLine);
  2140. lineInst->addImmediateOperand(0);
  2141. buildPoint->addInstruction(std::move(lineInst));
  2142. }
  2143. if (emitNonSemanticShaderDebugInfo) {
  2144. auto lineInst = std::make_unique<Instruction>(getUniqueId(), makeVoidType(), Op::OpExtInst);
  2145. lineInst->reserveOperands(7);
  2146. lineInst->addIdOperand(nonSemanticShaderDebugInfo);
  2147. lineInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugLine);
  2148. lineInst->addIdOperand(makeDebugSource(currentFileId));
  2149. lineInst->addIdOperand(makeUintConstant(currentLine));
  2150. lineInst->addIdOperand(makeUintConstant(currentLine));
  2151. lineInst->addIdOperand(makeUintConstant(0));
  2152. lineInst->addIdOperand(makeUintConstant(0));
  2153. buildPoint->addInstruction(std::move(lineInst));
  2154. }
  2155. }
  2156. dirtyLineTracker = false;
  2157. }
  2158. buildPoint->addInstruction(std::move(inst));
  2159. }
  2160. void Builder::addInstructionNoDebugInfo(std::unique_ptr<Instruction> inst) {
  2161. buildPoint->addInstruction(std::move(inst));
  2162. }
  2163. // Comments in header
  2164. Function* Builder::makeEntryPoint(const char* entryPoint)
  2165. {
  2166. assert(! entryPointFunction);
  2167. auto const returnType = makeVoidType();
  2168. restoreNonSemanticShaderDebugInfo = emitNonSemanticShaderDebugInfo;
  2169. if(sourceLang == spv::SourceLanguage::HLSL) {
  2170. emitNonSemanticShaderDebugInfo = false;
  2171. }
  2172. Block* entry = nullptr;
  2173. entryPointFunction = makeFunctionEntry(NoPrecision, returnType, entryPoint, LinkageType::Max, {}, {}, &entry);
  2174. emitNonSemanticShaderDebugInfo = restoreNonSemanticShaderDebugInfo;
  2175. return entryPointFunction;
  2176. }
  2177. // Comments in header
  2178. Function* Builder::makeFunctionEntry(Decoration precision, Id returnType, const char* name, LinkageType linkType,
  2179. const std::vector<Id>& paramTypes,
  2180. const std::vector<std::vector<Decoration>>& decorations, Block** entry)
  2181. {
  2182. // Make the function and initial instructions in it
  2183. Id typeId = makeFunctionType(returnType, paramTypes);
  2184. Id firstParamId = paramTypes.size() == 0 ? 0 : getUniqueIds((int)paramTypes.size());
  2185. Id funcId = getUniqueId();
  2186. Function* function = new Function(funcId, returnType, typeId, firstParamId, linkType, name, module);
  2187. // Set up the precisions
  2188. setPrecision(function->getId(), precision);
  2189. function->setReturnPrecision(precision);
  2190. for (unsigned p = 0; p < (unsigned)decorations.size(); ++p) {
  2191. for (int d = 0; d < (int)decorations[p].size(); ++d) {
  2192. addDecoration(firstParamId + p, decorations[p][d]);
  2193. function->addParamPrecision(p, decorations[p][d]);
  2194. }
  2195. }
  2196. // reset last debug scope
  2197. if (emitNonSemanticShaderDebugInfo) {
  2198. dirtyScopeTracker = true;
  2199. }
  2200. // CFG
  2201. assert(entry != nullptr);
  2202. *entry = new Block(getUniqueId(), *function);
  2203. function->addBlock(*entry);
  2204. setBuildPoint(*entry);
  2205. if (name)
  2206. addName(function->getId(), name);
  2207. functions.push_back(std::unique_ptr<Function>(function));
  2208. return function;
  2209. }
  2210. void Builder::setupFunctionDebugInfo(Function* function, const char* name, const std::vector<Id>& paramTypes,
  2211. const std::vector<char const*>& paramNames)
  2212. {
  2213. if (!emitNonSemanticShaderDebugInfo)
  2214. return;
  2215. Id nameId = getStringId(unmangleFunctionName(name));
  2216. Id funcTypeId = function->getFuncTypeId();
  2217. assert(debugId[funcTypeId] != 0);
  2218. Id funcId = function->getId();
  2219. assert(funcId != 0);
  2220. // Make the debug function instruction
  2221. Id debugFuncId = makeDebugFunction(function, nameId, funcTypeId);
  2222. debugId[funcId] = debugFuncId;
  2223. currentDebugScopeId.push(debugFuncId);
  2224. // DebugScope and DebugLine for parameter DebugDeclares
  2225. assert(paramTypes.size() == paramNames.size());
  2226. if ((int)paramTypes.size() > 0) {
  2227. Id firstParamId = function->getParamId(0);
  2228. for (size_t p = 0; p < paramTypes.size(); ++p) {
  2229. bool passByRef = false;
  2230. Id paramTypeId = paramTypes[p];
  2231. // For pointer-typed parameters, they are actually passed by reference and we need unwrap the pointer to get the actual parameter type.
  2232. if (isPointerType(paramTypeId) || isArrayType(paramTypeId)) {
  2233. passByRef = true;
  2234. paramTypeId = getContainedTypeId(paramTypeId);
  2235. }
  2236. auto const& paramName = paramNames[p];
  2237. auto const debugLocalVariableId = createDebugLocalVariable(debugId[paramTypeId], paramName, p + 1);
  2238. auto const paramId = static_cast<Id>(firstParamId + p);
  2239. debugId[paramId] = debugLocalVariableId;
  2240. if (passByRef) {
  2241. makeDebugDeclare(debugLocalVariableId, paramId);
  2242. } else {
  2243. makeDebugValue(debugLocalVariableId, paramId);
  2244. }
  2245. }
  2246. }
  2247. // Clear debug scope stack
  2248. if (emitNonSemanticShaderDebugInfo)
  2249. currentDebugScopeId.pop();
  2250. }
  2251. Id Builder::makeDebugFunction([[maybe_unused]] Function* function, Id nameId, Id funcTypeId)
  2252. {
  2253. assert(function != nullptr);
  2254. assert(nameId != 0);
  2255. assert(funcTypeId != 0);
  2256. assert(debugId[funcTypeId] != 0);
  2257. Id funcId = getUniqueId();
  2258. auto type = new Instruction(funcId, makeVoidType(), Op::OpExtInst);
  2259. type->reserveOperands(11);
  2260. type->addIdOperand(nonSemanticShaderDebugInfo);
  2261. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugFunction);
  2262. type->addIdOperand(nameId);
  2263. type->addIdOperand(debugId[funcTypeId]);
  2264. type->addIdOperand(makeDebugSource(currentFileId)); // TODO: This points to file of definition instead of declaration
  2265. type->addIdOperand(makeUintConstant(currentLine)); // TODO: This points to line of definition instead of declaration
  2266. type->addIdOperand(makeUintConstant(0)); // column
  2267. type->addIdOperand(makeDebugCompilationUnit()); // scope
  2268. type->addIdOperand(nameId); // linkage name
  2269. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsPublic));
  2270. type->addIdOperand(makeUintConstant(currentLine));
  2271. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  2272. module.mapInstruction(type);
  2273. return funcId;
  2274. }
  2275. Id Builder::makeDebugLexicalBlock(uint32_t line, uint32_t column) {
  2276. assert(!currentDebugScopeId.empty());
  2277. Id lexId = getUniqueId();
  2278. auto lex = new Instruction(lexId, makeVoidType(), Op::OpExtInst);
  2279. lex->reserveOperands(6);
  2280. lex->addIdOperand(nonSemanticShaderDebugInfo);
  2281. lex->addImmediateOperand(NonSemanticShaderDebugInfo100DebugLexicalBlock);
  2282. lex->addIdOperand(makeDebugSource(currentFileId));
  2283. lex->addIdOperand(makeUintConstant(line));
  2284. lex->addIdOperand(makeUintConstant(column)); // column
  2285. lex->addIdOperand(currentDebugScopeId.top()); // scope
  2286. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(lex));
  2287. module.mapInstruction(lex);
  2288. return lexId;
  2289. }
  2290. std::string Builder::unmangleFunctionName(std::string const& name) const
  2291. {
  2292. assert(name.length() > 0);
  2293. if(name.rfind('(') != std::string::npos) {
  2294. return name.substr(0, name.rfind('('));
  2295. } else {
  2296. return name;
  2297. }
  2298. }
  2299. // Comments in header
  2300. void Builder::makeReturn(bool implicit, Id retVal)
  2301. {
  2302. if (retVal) {
  2303. Instruction* inst = new Instruction(NoResult, NoType, Op::OpReturnValue);
  2304. inst->addIdOperand(retVal);
  2305. addInstruction(std::unique_ptr<Instruction>(inst));
  2306. } else
  2307. addInstruction(std::unique_ptr<Instruction>(new Instruction(NoResult, NoType, Op::OpReturn)));
  2308. if (! implicit)
  2309. createAndSetNoPredecessorBlock("post-return");
  2310. }
  2311. // Comments in header
  2312. void Builder::enterLexicalBlock(uint32_t line, uint32_t column)
  2313. {
  2314. if (!emitNonSemanticShaderDebugInfo) {
  2315. return;
  2316. }
  2317. // Generate new lexical scope debug instruction
  2318. Id lexId = makeDebugLexicalBlock(line, column);
  2319. currentDebugScopeId.push(lexId);
  2320. dirtyScopeTracker = true;
  2321. }
  2322. // Comments in header
  2323. void Builder::leaveLexicalBlock()
  2324. {
  2325. if (!emitNonSemanticShaderDebugInfo) {
  2326. return;
  2327. }
  2328. // Pop current scope from stack and clear current scope
  2329. currentDebugScopeId.pop();
  2330. dirtyScopeTracker = true;
  2331. }
  2332. // Comments in header
  2333. void Builder::enterFunction(Function const* function)
  2334. {
  2335. // Save and disable debugInfo for HLSL entry point function. It is a wrapper
  2336. // function with no user code in it.
  2337. restoreNonSemanticShaderDebugInfo = emitNonSemanticShaderDebugInfo;
  2338. if (sourceLang == spv::SourceLanguage::HLSL && function == entryPointFunction) {
  2339. emitNonSemanticShaderDebugInfo = false;
  2340. }
  2341. if (emitNonSemanticShaderDebugInfo) {
  2342. // Initialize scope state
  2343. Id funcId = function->getFuncId();
  2344. currentDebugScopeId.push(debugId[funcId]);
  2345. // Create DebugFunctionDefinition
  2346. spv::Id resultId = getUniqueId();
  2347. Instruction* defInst = new Instruction(resultId, makeVoidType(), Op::OpExtInst);
  2348. defInst->reserveOperands(4);
  2349. defInst->addIdOperand(nonSemanticShaderDebugInfo);
  2350. defInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugFunctionDefinition);
  2351. defInst->addIdOperand(debugId[funcId]);
  2352. defInst->addIdOperand(funcId);
  2353. addInstruction(std::unique_ptr<Instruction>(defInst));
  2354. }
  2355. if (auto linkType = function->getLinkType(); linkType != LinkageType::Max) {
  2356. Id funcId = function->getFuncId();
  2357. addCapability(Capability::Linkage);
  2358. addLinkageDecoration(funcId, function->getExportName(), linkType);
  2359. }
  2360. }
  2361. // Comments in header
  2362. void Builder::leaveFunction()
  2363. {
  2364. Block* block = buildPoint;
  2365. Function& function = buildPoint->getParent();
  2366. assert(block);
  2367. // If our function did not contain a return, add a return void now.
  2368. if (! block->isTerminated()) {
  2369. if (function.getReturnType() == makeVoidType())
  2370. makeReturn(true);
  2371. else {
  2372. makeReturn(true, createUndefined(function.getReturnType()));
  2373. }
  2374. }
  2375. // Clear function scope from debug scope stack
  2376. if (emitNonSemanticShaderDebugInfo)
  2377. currentDebugScopeId.pop();
  2378. emitNonSemanticShaderDebugInfo = restoreNonSemanticShaderDebugInfo;
  2379. }
  2380. // Comments in header
  2381. void Builder::makeStatementTerminator(spv::Op opcode, const char *name)
  2382. {
  2383. addInstruction(std::unique_ptr<Instruction>(new Instruction(opcode)));
  2384. createAndSetNoPredecessorBlock(name);
  2385. }
  2386. // Comments in header
  2387. void Builder::makeStatementTerminator(spv::Op opcode, const std::vector<Id>& operands, const char* name)
  2388. {
  2389. // It's assumed that the terminator instruction is always of void return type
  2390. // However in future if there is a need for non void return type, new helper
  2391. // methods can be created.
  2392. createNoResultOp(opcode, operands);
  2393. createAndSetNoPredecessorBlock(name);
  2394. }
  2395. // Comments in header
  2396. Id Builder::createVariable(Decoration precision, StorageClass storageClass, Id type, const char* name, Id initializer,
  2397. bool const compilerGenerated)
  2398. {
  2399. Id pointerType = makePointer(storageClass, type);
  2400. Instruction* inst = new Instruction(getUniqueId(), pointerType, Op::OpVariable);
  2401. inst->addImmediateOperand(storageClass);
  2402. if (initializer != NoResult)
  2403. inst->addIdOperand(initializer);
  2404. switch (storageClass) {
  2405. case StorageClass::Function:
  2406. // Validation rules require the declaration in the entry block
  2407. buildPoint->getParent().addLocalVariable(std::unique_ptr<Instruction>(inst));
  2408. if (emitNonSemanticShaderDebugInfo && !compilerGenerated)
  2409. {
  2410. auto const debugLocalVariableId = createDebugLocalVariable(debugId[type], name);
  2411. debugId[inst->getResultId()] = debugLocalVariableId;
  2412. makeDebugDeclare(debugLocalVariableId, inst->getResultId());
  2413. }
  2414. break;
  2415. default:
  2416. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
  2417. module.mapInstruction(inst);
  2418. if (emitNonSemanticShaderDebugInfo)
  2419. {
  2420. auto const debugResultId = createDebugGlobalVariable(debugId[type], name, inst->getResultId());
  2421. debugId[inst->getResultId()] = debugResultId;
  2422. }
  2423. break;
  2424. }
  2425. if (name)
  2426. addName(inst->getResultId(), name);
  2427. setPrecision(inst->getResultId(), precision);
  2428. return inst->getResultId();
  2429. }
  2430. // Comments in header
  2431. Id Builder::createUndefined(Id type)
  2432. {
  2433. Instruction* inst = new Instruction(getUniqueId(), type, Op::OpUndef);
  2434. addInstruction(std::unique_ptr<Instruction>(inst));
  2435. return inst->getResultId();
  2436. }
  2437. // av/vis/nonprivate are unnecessary and illegal for some storage classes.
  2438. spv::MemoryAccessMask Builder::sanitizeMemoryAccessForStorageClass(spv::MemoryAccessMask memoryAccess, StorageClass sc)
  2439. const
  2440. {
  2441. switch (sc) {
  2442. case spv::StorageClass::Uniform:
  2443. case spv::StorageClass::Workgroup:
  2444. case spv::StorageClass::StorageBuffer:
  2445. case spv::StorageClass::PhysicalStorageBufferEXT:
  2446. break;
  2447. default:
  2448. memoryAccess = spv::MemoryAccessMask(memoryAccess &
  2449. ~(spv::MemoryAccessMask::MakePointerAvailableKHR |
  2450. spv::MemoryAccessMask::MakePointerVisibleKHR |
  2451. spv::MemoryAccessMask::NonPrivatePointerKHR));
  2452. break;
  2453. }
  2454. return memoryAccess;
  2455. }
  2456. // Comments in header
  2457. void Builder::createStore(Id rValue, Id lValue, spv::MemoryAccessMask memoryAccess, spv::Scope scope,
  2458. unsigned int alignment)
  2459. {
  2460. Instruction* store = new Instruction(Op::OpStore);
  2461. store->reserveOperands(2);
  2462. store->addIdOperand(lValue);
  2463. store->addIdOperand(rValue);
  2464. memoryAccess = sanitizeMemoryAccessForStorageClass(memoryAccess, getStorageClass(lValue));
  2465. if (memoryAccess != MemoryAccessMask::MaskNone) {
  2466. store->addImmediateOperand(memoryAccess);
  2467. if (anySet(memoryAccess, spv::MemoryAccessMask::Aligned)) {
  2468. store->addImmediateOperand(alignment);
  2469. }
  2470. if (anySet(memoryAccess, spv::MemoryAccessMask::MakePointerAvailableKHR)) {
  2471. store->addIdOperand(makeUintConstant(scope));
  2472. }
  2473. }
  2474. addInstruction(std::unique_ptr<Instruction>(store));
  2475. }
  2476. // Comments in header
  2477. Id Builder::createLoad(Id lValue, spv::Decoration precision, spv::MemoryAccessMask memoryAccess,
  2478. spv::Scope scope, unsigned int alignment)
  2479. {
  2480. Instruction* load = new Instruction(getUniqueId(), getDerefTypeId(lValue), Op::OpLoad);
  2481. load->addIdOperand(lValue);
  2482. memoryAccess = sanitizeMemoryAccessForStorageClass(memoryAccess, getStorageClass(lValue));
  2483. if (memoryAccess != MemoryAccessMask::MaskNone) {
  2484. load->addImmediateOperand(memoryAccess);
  2485. if (anySet(memoryAccess, spv::MemoryAccessMask::Aligned)) {
  2486. load->addImmediateOperand(alignment);
  2487. }
  2488. if (anySet(memoryAccess, spv::MemoryAccessMask::MakePointerVisibleKHR)) {
  2489. load->addIdOperand(makeUintConstant(scope));
  2490. }
  2491. }
  2492. addInstruction(std::unique_ptr<Instruction>(load));
  2493. setPrecision(load->getResultId(), precision);
  2494. return load->getResultId();
  2495. }
  2496. // Comments in header
  2497. Id Builder::createAccessChain(StorageClass storageClass, Id base, const std::vector<Id>& offsets)
  2498. {
  2499. // Figure out the final resulting type.
  2500. Id typeId = getResultingAccessChainType();
  2501. typeId = makePointer(storageClass, typeId);
  2502. // Make the instruction
  2503. Instruction* chain = new Instruction(getUniqueId(), typeId, Op::OpAccessChain);
  2504. chain->reserveOperands(offsets.size() + 1);
  2505. chain->addIdOperand(base);
  2506. for (int i = 0; i < (int)offsets.size(); ++i)
  2507. chain->addIdOperand(offsets[i]);
  2508. addInstruction(std::unique_ptr<Instruction>(chain));
  2509. return chain->getResultId();
  2510. }
  2511. Id Builder::createArrayLength(Id base, unsigned int member)
  2512. {
  2513. spv::Id intType = makeUintType(32);
  2514. Instruction* length = new Instruction(getUniqueId(), intType, Op::OpArrayLength);
  2515. length->reserveOperands(2);
  2516. length->addIdOperand(base);
  2517. length->addImmediateOperand(member);
  2518. addInstruction(std::unique_ptr<Instruction>(length));
  2519. return length->getResultId();
  2520. }
  2521. Id Builder::createCooperativeMatrixLengthKHR(Id type)
  2522. {
  2523. spv::Id intType = makeUintType(32);
  2524. // Generate code for spec constants if in spec constant operation
  2525. // generation mode.
  2526. if (generatingOpCodeForSpecConst) {
  2527. return createSpecConstantOp(Op::OpCooperativeMatrixLengthKHR, intType, std::vector<Id>(1, type), std::vector<Id>());
  2528. }
  2529. Instruction* length = new Instruction(getUniqueId(), intType, Op::OpCooperativeMatrixLengthKHR);
  2530. length->addIdOperand(type);
  2531. addInstruction(std::unique_ptr<Instruction>(length));
  2532. return length->getResultId();
  2533. }
  2534. Id Builder::createCooperativeMatrixLengthNV(Id type)
  2535. {
  2536. spv::Id intType = makeUintType(32);
  2537. // Generate code for spec constants if in spec constant operation
  2538. // generation mode.
  2539. if (generatingOpCodeForSpecConst) {
  2540. return createSpecConstantOp(Op::OpCooperativeMatrixLengthNV, intType, std::vector<Id>(1, type), std::vector<Id>());
  2541. }
  2542. Instruction* length = new Instruction(getUniqueId(), intType, Op::OpCooperativeMatrixLengthNV);
  2543. length->addIdOperand(type);
  2544. addInstruction(std::unique_ptr<Instruction>(length));
  2545. return length->getResultId();
  2546. }
  2547. Id Builder::createCompositeExtract(Id composite, Id typeId, unsigned index)
  2548. {
  2549. // Generate code for spec constants if in spec constant operation
  2550. // generation mode.
  2551. if (generatingOpCodeForSpecConst) {
  2552. return createSpecConstantOp(Op::OpCompositeExtract, typeId, std::vector<Id>(1, composite),
  2553. std::vector<Id>(1, index));
  2554. }
  2555. Instruction* extract = new Instruction(getUniqueId(), typeId, Op::OpCompositeExtract);
  2556. extract->reserveOperands(2);
  2557. extract->addIdOperand(composite);
  2558. extract->addImmediateOperand(index);
  2559. addInstruction(std::unique_ptr<Instruction>(extract));
  2560. return extract->getResultId();
  2561. }
  2562. Id Builder::createCompositeExtract(Id composite, Id typeId, const std::vector<unsigned>& indexes)
  2563. {
  2564. // Generate code for spec constants if in spec constant operation
  2565. // generation mode.
  2566. if (generatingOpCodeForSpecConst) {
  2567. return createSpecConstantOp(Op::OpCompositeExtract, typeId, std::vector<Id>(1, composite), indexes);
  2568. }
  2569. Instruction* extract = new Instruction(getUniqueId(), typeId, Op::OpCompositeExtract);
  2570. extract->reserveOperands(indexes.size() + 1);
  2571. extract->addIdOperand(composite);
  2572. for (int i = 0; i < (int)indexes.size(); ++i)
  2573. extract->addImmediateOperand(indexes[i]);
  2574. addInstruction(std::unique_ptr<Instruction>(extract));
  2575. return extract->getResultId();
  2576. }
  2577. Id Builder::createCompositeInsert(Id object, Id composite, Id typeId, unsigned index)
  2578. {
  2579. Instruction* insert = new Instruction(getUniqueId(), typeId, Op::OpCompositeInsert);
  2580. insert->reserveOperands(3);
  2581. insert->addIdOperand(object);
  2582. insert->addIdOperand(composite);
  2583. insert->addImmediateOperand(index);
  2584. addInstruction(std::unique_ptr<Instruction>(insert));
  2585. return insert->getResultId();
  2586. }
  2587. Id Builder::createCompositeInsert(Id object, Id composite, Id typeId, const std::vector<unsigned>& indexes)
  2588. {
  2589. Instruction* insert = new Instruction(getUniqueId(), typeId, Op::OpCompositeInsert);
  2590. insert->reserveOperands(indexes.size() + 2);
  2591. insert->addIdOperand(object);
  2592. insert->addIdOperand(composite);
  2593. for (int i = 0; i < (int)indexes.size(); ++i)
  2594. insert->addImmediateOperand(indexes[i]);
  2595. addInstruction(std::unique_ptr<Instruction>(insert));
  2596. return insert->getResultId();
  2597. }
  2598. Id Builder::createVectorExtractDynamic(Id vector, Id typeId, Id componentIndex)
  2599. {
  2600. Instruction* extract = new Instruction(getUniqueId(), typeId, Op::OpVectorExtractDynamic);
  2601. extract->reserveOperands(2);
  2602. extract->addIdOperand(vector);
  2603. extract->addIdOperand(componentIndex);
  2604. addInstruction(std::unique_ptr<Instruction>(extract));
  2605. return extract->getResultId();
  2606. }
  2607. Id Builder::createVectorInsertDynamic(Id vector, Id typeId, Id component, Id componentIndex)
  2608. {
  2609. Instruction* insert = new Instruction(getUniqueId(), typeId, Op::OpVectorInsertDynamic);
  2610. insert->reserveOperands(3);
  2611. insert->addIdOperand(vector);
  2612. insert->addIdOperand(component);
  2613. insert->addIdOperand(componentIndex);
  2614. addInstruction(std::unique_ptr<Instruction>(insert));
  2615. return insert->getResultId();
  2616. }
  2617. // An opcode that has no operands, no result id, and no type
  2618. void Builder::createNoResultOp(Op opCode)
  2619. {
  2620. Instruction* op = new Instruction(opCode);
  2621. addInstruction(std::unique_ptr<Instruction>(op));
  2622. }
  2623. // An opcode that has one id operand, no result id, and no type
  2624. void Builder::createNoResultOp(Op opCode, Id operand)
  2625. {
  2626. Instruction* op = new Instruction(opCode);
  2627. op->addIdOperand(operand);
  2628. addInstruction(std::unique_ptr<Instruction>(op));
  2629. }
  2630. // An opcode that has one or more operands, no result id, and no type
  2631. void Builder::createNoResultOp(Op opCode, const std::vector<Id>& operands)
  2632. {
  2633. Instruction* op = new Instruction(opCode);
  2634. op->reserveOperands(operands.size());
  2635. for (auto id : operands) {
  2636. op->addIdOperand(id);
  2637. }
  2638. addInstruction(std::unique_ptr<Instruction>(op));
  2639. }
  2640. // An opcode that has multiple operands, no result id, and no type
  2641. void Builder::createNoResultOp(Op opCode, const std::vector<IdImmediate>& operands)
  2642. {
  2643. Instruction* op = new Instruction(opCode);
  2644. op->reserveOperands(operands.size());
  2645. for (auto it = operands.cbegin(); it != operands.cend(); ++it) {
  2646. if (it->isId)
  2647. op->addIdOperand(it->word);
  2648. else
  2649. op->addImmediateOperand(it->word);
  2650. }
  2651. addInstruction(std::unique_ptr<Instruction>(op));
  2652. }
  2653. void Builder::createControlBarrier(Scope execution, Scope memory, MemorySemanticsMask semantics)
  2654. {
  2655. Instruction* op = new Instruction(Op::OpControlBarrier);
  2656. op->reserveOperands(3);
  2657. op->addIdOperand(makeUintConstant(execution));
  2658. op->addIdOperand(makeUintConstant(memory));
  2659. op->addIdOperand(makeUintConstant(semantics));
  2660. addInstruction(std::unique_ptr<Instruction>(op));
  2661. }
  2662. void Builder::createMemoryBarrier(Scope executionScope, MemorySemanticsMask memorySemantics)
  2663. {
  2664. Instruction* op = new Instruction(Op::OpMemoryBarrier);
  2665. op->reserveOperands(2);
  2666. op->addIdOperand(makeUintConstant((unsigned)executionScope));
  2667. op->addIdOperand(makeUintConstant((unsigned)memorySemantics));
  2668. addInstruction(std::unique_ptr<Instruction>(op));
  2669. }
  2670. // An opcode that has one operands, a result id, and a type
  2671. Id Builder::createUnaryOp(Op opCode, Id typeId, Id operand)
  2672. {
  2673. // Generate code for spec constants if in spec constant operation
  2674. // generation mode.
  2675. if (generatingOpCodeForSpecConst) {
  2676. return createSpecConstantOp(opCode, typeId, std::vector<Id>(1, operand), std::vector<Id>());
  2677. }
  2678. Instruction* op = new Instruction(getUniqueId(), typeId, opCode);
  2679. op->addIdOperand(operand);
  2680. addInstruction(std::unique_ptr<Instruction>(op));
  2681. return op->getResultId();
  2682. }
  2683. Id Builder::createBinOp(Op opCode, Id typeId, Id left, Id right)
  2684. {
  2685. // Generate code for spec constants if in spec constant operation
  2686. // generation mode.
  2687. if (generatingOpCodeForSpecConst) {
  2688. std::vector<Id> operands(2);
  2689. operands[0] = left; operands[1] = right;
  2690. return createSpecConstantOp(opCode, typeId, operands, std::vector<Id>());
  2691. }
  2692. Instruction* op = new Instruction(getUniqueId(), typeId, opCode);
  2693. op->reserveOperands(2);
  2694. op->addIdOperand(left);
  2695. op->addIdOperand(right);
  2696. addInstruction(std::unique_ptr<Instruction>(op));
  2697. return op->getResultId();
  2698. }
  2699. Id Builder::createTriOp(Op opCode, Id typeId, Id op1, Id op2, Id op3)
  2700. {
  2701. // Generate code for spec constants if in spec constant operation
  2702. // generation mode.
  2703. if (generatingOpCodeForSpecConst) {
  2704. std::vector<Id> operands(3);
  2705. operands[0] = op1;
  2706. operands[1] = op2;
  2707. operands[2] = op3;
  2708. return createSpecConstantOp(
  2709. opCode, typeId, operands, std::vector<Id>());
  2710. }
  2711. Instruction* op = new Instruction(getUniqueId(), typeId, opCode);
  2712. op->reserveOperands(3);
  2713. op->addIdOperand(op1);
  2714. op->addIdOperand(op2);
  2715. op->addIdOperand(op3);
  2716. addInstruction(std::unique_ptr<Instruction>(op));
  2717. return op->getResultId();
  2718. }
  2719. Id Builder::createOp(Op opCode, Id typeId, const std::vector<Id>& operands)
  2720. {
  2721. Instruction* op = new Instruction(getUniqueId(), typeId, opCode);
  2722. op->reserveOperands(operands.size());
  2723. for (auto id : operands)
  2724. op->addIdOperand(id);
  2725. addInstruction(std::unique_ptr<Instruction>(op));
  2726. return op->getResultId();
  2727. }
  2728. Id Builder::createOp(Op opCode, Id typeId, const std::vector<IdImmediate>& operands)
  2729. {
  2730. Instruction* op = new Instruction(getUniqueId(), typeId, opCode);
  2731. op->reserveOperands(operands.size());
  2732. for (auto it = operands.cbegin(); it != operands.cend(); ++it) {
  2733. if (it->isId)
  2734. op->addIdOperand(it->word);
  2735. else
  2736. op->addImmediateOperand(it->word);
  2737. }
  2738. addInstruction(std::unique_ptr<Instruction>(op));
  2739. return op->getResultId();
  2740. }
  2741. Id Builder::createSpecConstantOp(Op opCode, Id typeId, const std::vector<Id>& operands,
  2742. const std::vector<unsigned>& literals)
  2743. {
  2744. Instruction* op = new Instruction(getUniqueId(), typeId, Op::OpSpecConstantOp);
  2745. op->reserveOperands(operands.size() + literals.size() + 1);
  2746. op->addImmediateOperand((unsigned) opCode);
  2747. for (auto it = operands.cbegin(); it != operands.cend(); ++it)
  2748. op->addIdOperand(*it);
  2749. for (auto it = literals.cbegin(); it != literals.cend(); ++it)
  2750. op->addImmediateOperand(*it);
  2751. module.mapInstruction(op);
  2752. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(op));
  2753. // OpSpecConstantOp's using 8 or 16 bit types require the associated capability
  2754. if (containsType(typeId, Op::OpTypeInt, 8))
  2755. addCapability(Capability::Int8);
  2756. if (containsType(typeId, Op::OpTypeInt, 16))
  2757. addCapability(Capability::Int16);
  2758. if (containsType(typeId, Op::OpTypeFloat, 16))
  2759. addCapability(Capability::Float16);
  2760. return op->getResultId();
  2761. }
  2762. Id Builder::createFunctionCall(spv::Function* function, const std::vector<spv::Id>& args)
  2763. {
  2764. Instruction* op = new Instruction(getUniqueId(), function->getReturnType(), Op::OpFunctionCall);
  2765. op->reserveOperands(args.size() + 1);
  2766. op->addIdOperand(function->getId());
  2767. for (int a = 0; a < (int)args.size(); ++a)
  2768. op->addIdOperand(args[a]);
  2769. addInstruction(std::unique_ptr<Instruction>(op));
  2770. return op->getResultId();
  2771. }
  2772. // Comments in header
  2773. Id Builder::createRvalueSwizzle(Decoration precision, Id typeId, Id source, const std::vector<unsigned>& channels)
  2774. {
  2775. if (channels.size() == 1)
  2776. return setPrecision(createCompositeExtract(source, typeId, channels.front()), precision);
  2777. if (generatingOpCodeForSpecConst) {
  2778. std::vector<Id> operands(2);
  2779. operands[0] = operands[1] = source;
  2780. return setPrecision(createSpecConstantOp(Op::OpVectorShuffle, typeId, operands, channels), precision);
  2781. }
  2782. Instruction* swizzle = new Instruction(getUniqueId(), typeId, Op::OpVectorShuffle);
  2783. assert(isVector(source));
  2784. swizzle->reserveOperands(channels.size() + 2);
  2785. swizzle->addIdOperand(source);
  2786. swizzle->addIdOperand(source);
  2787. for (int i = 0; i < (int)channels.size(); ++i)
  2788. swizzle->addImmediateOperand(channels[i]);
  2789. addInstruction(std::unique_ptr<Instruction>(swizzle));
  2790. return setPrecision(swizzle->getResultId(), precision);
  2791. }
  2792. // Comments in header
  2793. Id Builder::createLvalueSwizzle(Id typeId, Id target, Id source, const std::vector<unsigned>& channels)
  2794. {
  2795. if (channels.size() == 1 && getNumComponents(source) == 1)
  2796. return createCompositeInsert(source, target, typeId, channels.front());
  2797. Instruction* swizzle = new Instruction(getUniqueId(), typeId, Op::OpVectorShuffle);
  2798. assert(isVector(target));
  2799. swizzle->reserveOperands(2);
  2800. swizzle->addIdOperand(target);
  2801. assert(getNumComponents(source) == channels.size());
  2802. assert(isVector(source));
  2803. swizzle->addIdOperand(source);
  2804. // Set up an identity shuffle from the base value to the result value
  2805. unsigned int components[4];
  2806. int numTargetComponents = getNumComponents(target);
  2807. for (int i = 0; i < numTargetComponents; ++i)
  2808. components[i] = i;
  2809. // Punch in the l-value swizzle
  2810. for (int i = 0; i < (int)channels.size(); ++i)
  2811. components[channels[i]] = numTargetComponents + i;
  2812. // finish the instruction with these components selectors
  2813. swizzle->reserveOperands(numTargetComponents);
  2814. for (int i = 0; i < numTargetComponents; ++i)
  2815. swizzle->addImmediateOperand(components[i]);
  2816. addInstruction(std::unique_ptr<Instruction>(swizzle));
  2817. return swizzle->getResultId();
  2818. }
  2819. // Comments in header
  2820. void Builder::promoteScalar(Decoration precision, Id& left, Id& right)
  2821. {
  2822. int direction = getNumComponents(right) - getNumComponents(left);
  2823. if (direction > 0)
  2824. left = smearScalar(precision, left, makeVectorType(getTypeId(left), getNumComponents(right)));
  2825. else if (direction < 0)
  2826. right = smearScalar(precision, right, makeVectorType(getTypeId(right), getNumComponents(left)));
  2827. return;
  2828. }
  2829. // Comments in header
  2830. Id Builder::smearScalar(Decoration precision, Id scalar, Id vectorType)
  2831. {
  2832. assert(getNumComponents(scalar) == 1);
  2833. assert(getTypeId(scalar) == getScalarTypeId(vectorType));
  2834. int numComponents = getNumTypeComponents(vectorType);
  2835. if (numComponents == 1 && !isCooperativeVectorType(vectorType))
  2836. return scalar;
  2837. Instruction* smear = nullptr;
  2838. if (generatingOpCodeForSpecConst) {
  2839. auto members = std::vector<spv::Id>(numComponents, scalar);
  2840. // Sometime even in spec-constant-op mode, the temporary vector created by
  2841. // promoting a scalar might not be a spec constant. This should depend on
  2842. // the scalar.
  2843. // e.g.:
  2844. // const vec2 spec_const_result = a_spec_const_vec2 + a_front_end_const_scalar;
  2845. // In such cases, the temporary vector created from a_front_end_const_scalar
  2846. // is not a spec constant vector, even though the binary operation node is marked
  2847. // as 'specConstant' and we are in spec-constant-op mode.
  2848. auto result_id = makeCompositeConstant(vectorType, members, isSpecConstant(scalar));
  2849. smear = module.getInstruction(result_id);
  2850. } else {
  2851. bool replicate = (useReplicatedComposites || isCooperativeVectorType(vectorType)) && (numComponents > 0);
  2852. if (replicate) {
  2853. numComponents = 1;
  2854. addCapability(spv::Capability::ReplicatedCompositesEXT);
  2855. addExtension(spv::E_SPV_EXT_replicated_composites);
  2856. }
  2857. Op opcode = replicate ? Op::OpCompositeConstructReplicateEXT : Op::OpCompositeConstruct;
  2858. smear = new Instruction(getUniqueId(), vectorType, opcode);
  2859. smear->reserveOperands(numComponents);
  2860. for (int c = 0; c < numComponents; ++c)
  2861. smear->addIdOperand(scalar);
  2862. addInstruction(std::unique_ptr<Instruction>(smear));
  2863. }
  2864. return setPrecision(smear->getResultId(), precision);
  2865. }
  2866. // Comments in header
  2867. Id Builder::createBuiltinCall(Id resultType, Id builtins, int entryPoint, const std::vector<Id>& args)
  2868. {
  2869. Instruction* inst = new Instruction(getUniqueId(), resultType, Op::OpExtInst);
  2870. inst->reserveOperands(args.size() + 2);
  2871. inst->addIdOperand(builtins);
  2872. inst->addImmediateOperand(entryPoint);
  2873. for (int arg = 0; arg < (int)args.size(); ++arg)
  2874. inst->addIdOperand(args[arg]);
  2875. addInstruction(std::unique_ptr<Instruction>(inst));
  2876. return inst->getResultId();
  2877. }
  2878. // Accept all parameters needed to create a texture instruction.
  2879. // Create the correct instruction based on the inputs, and make the call.
  2880. Id Builder::createTextureCall(Decoration precision, Id resultType, bool sparse, bool fetch, bool proj, bool gather,
  2881. bool noImplicitLod, const TextureParameters& parameters, ImageOperandsMask signExtensionMask)
  2882. {
  2883. std::vector<Id> texArgs;
  2884. //
  2885. // Set up the fixed arguments
  2886. //
  2887. bool explicitLod = false;
  2888. texArgs.push_back(parameters.sampler);
  2889. texArgs.push_back(parameters.coords);
  2890. if (parameters.Dref != NoResult)
  2891. texArgs.push_back(parameters.Dref);
  2892. if (parameters.component != NoResult)
  2893. texArgs.push_back(parameters.component);
  2894. if (parameters.granularity != NoResult)
  2895. texArgs.push_back(parameters.granularity);
  2896. if (parameters.coarse != NoResult)
  2897. texArgs.push_back(parameters.coarse);
  2898. //
  2899. // Set up the optional arguments
  2900. //
  2901. size_t optArgNum = texArgs.size(); // the position of the mask for the optional arguments, if any.
  2902. ImageOperandsMask mask = ImageOperandsMask::MaskNone; // the mask operand
  2903. if (parameters.bias) {
  2904. mask = (ImageOperandsMask)(mask | ImageOperandsMask::Bias);
  2905. texArgs.push_back(parameters.bias);
  2906. }
  2907. if (parameters.lod) {
  2908. mask = (ImageOperandsMask)(mask | ImageOperandsMask::Lod);
  2909. texArgs.push_back(parameters.lod);
  2910. explicitLod = true;
  2911. } else if (parameters.gradX) {
  2912. mask = (ImageOperandsMask)(mask | ImageOperandsMask::Grad);
  2913. texArgs.push_back(parameters.gradX);
  2914. texArgs.push_back(parameters.gradY);
  2915. explicitLod = true;
  2916. } else if (noImplicitLod && ! fetch && ! gather) {
  2917. // have to explicitly use lod of 0 if not allowed to have them be implicit, and
  2918. // we would otherwise be about to issue an implicit instruction
  2919. mask = (ImageOperandsMask)(mask | ImageOperandsMask::Lod);
  2920. texArgs.push_back(makeFloatConstant(0.0));
  2921. explicitLod = true;
  2922. }
  2923. if (parameters.offset) {
  2924. if (isConstant(parameters.offset))
  2925. mask = (ImageOperandsMask)(mask | ImageOperandsMask::ConstOffset);
  2926. else {
  2927. addCapability(Capability::ImageGatherExtended);
  2928. mask = (ImageOperandsMask)(mask | ImageOperandsMask::Offset);
  2929. }
  2930. texArgs.push_back(parameters.offset);
  2931. }
  2932. if (parameters.offsets) {
  2933. if (!isConstant(parameters.offsets) && sourceLang == spv::SourceLanguage::GLSL) {
  2934. mask = (ImageOperandsMask)(mask | ImageOperandsMask::Offsets);
  2935. } else {
  2936. addCapability(Capability::ImageGatherExtended);
  2937. mask = (ImageOperandsMask)(mask | ImageOperandsMask::ConstOffsets);
  2938. }
  2939. texArgs.push_back(parameters.offsets);
  2940. }
  2941. if (parameters.sample) {
  2942. mask = (ImageOperandsMask)(mask | ImageOperandsMask::Sample);
  2943. texArgs.push_back(parameters.sample);
  2944. }
  2945. if (parameters.lodClamp) {
  2946. // capability if this bit is used
  2947. addCapability(Capability::MinLod);
  2948. mask = (ImageOperandsMask)(mask | ImageOperandsMask::MinLod);
  2949. texArgs.push_back(parameters.lodClamp);
  2950. }
  2951. if (parameters.nonprivate) {
  2952. mask = mask | ImageOperandsMask::NonPrivateTexelKHR;
  2953. }
  2954. if (parameters.volatil) {
  2955. mask = mask | ImageOperandsMask::VolatileTexelKHR;
  2956. }
  2957. if (parameters.nontemporal) {
  2958. mask = mask | ImageOperandsMask::Nontemporal;
  2959. }
  2960. mask = mask | signExtensionMask;
  2961. // insert the operand for the mask, if any bits were set.
  2962. if (mask != ImageOperandsMask::MaskNone)
  2963. texArgs.insert(texArgs.begin() + optArgNum, (Id)mask);
  2964. //
  2965. // Set up the instruction
  2966. //
  2967. Op opCode = Op::OpNop; // All paths below need to set this
  2968. if (fetch) {
  2969. if (sparse)
  2970. opCode = Op::OpImageSparseFetch;
  2971. else
  2972. opCode = Op::OpImageFetch;
  2973. } else if (parameters.granularity && parameters.coarse) {
  2974. opCode = Op::OpImageSampleFootprintNV;
  2975. } else if (gather) {
  2976. if (parameters.Dref)
  2977. if (sparse)
  2978. opCode = Op::OpImageSparseDrefGather;
  2979. else
  2980. opCode = Op::OpImageDrefGather;
  2981. else
  2982. if (sparse)
  2983. opCode = Op::OpImageSparseGather;
  2984. else
  2985. opCode = Op::OpImageGather;
  2986. } else if (explicitLod) {
  2987. if (parameters.Dref) {
  2988. if (proj)
  2989. if (sparse)
  2990. opCode = Op::OpImageSparseSampleProjDrefExplicitLod;
  2991. else
  2992. opCode = Op::OpImageSampleProjDrefExplicitLod;
  2993. else
  2994. if (sparse)
  2995. opCode = Op::OpImageSparseSampleDrefExplicitLod;
  2996. else
  2997. opCode = Op::OpImageSampleDrefExplicitLod;
  2998. } else {
  2999. if (proj)
  3000. if (sparse)
  3001. opCode = Op::OpImageSparseSampleProjExplicitLod;
  3002. else
  3003. opCode = Op::OpImageSampleProjExplicitLod;
  3004. else
  3005. if (sparse)
  3006. opCode = Op::OpImageSparseSampleExplicitLod;
  3007. else
  3008. opCode = Op::OpImageSampleExplicitLod;
  3009. }
  3010. } else {
  3011. if (parameters.Dref) {
  3012. if (proj)
  3013. if (sparse)
  3014. opCode = Op::OpImageSparseSampleProjDrefImplicitLod;
  3015. else
  3016. opCode = Op::OpImageSampleProjDrefImplicitLod;
  3017. else
  3018. if (sparse)
  3019. opCode = Op::OpImageSparseSampleDrefImplicitLod;
  3020. else
  3021. opCode = Op::OpImageSampleDrefImplicitLod;
  3022. } else {
  3023. if (proj)
  3024. if (sparse)
  3025. opCode = Op::OpImageSparseSampleProjImplicitLod;
  3026. else
  3027. opCode = Op::OpImageSampleProjImplicitLod;
  3028. else
  3029. if (sparse)
  3030. opCode = Op::OpImageSparseSampleImplicitLod;
  3031. else
  3032. opCode = Op::OpImageSampleImplicitLod;
  3033. }
  3034. }
  3035. // See if the result type is expecting a smeared result.
  3036. // This happens when a legacy shadow*() call is made, which
  3037. // gets a vec4 back instead of a float.
  3038. Id smearedType = resultType;
  3039. if (! isScalarType(resultType)) {
  3040. switch (opCode) {
  3041. case Op::OpImageSampleDrefImplicitLod:
  3042. case Op::OpImageSampleDrefExplicitLod:
  3043. case Op::OpImageSampleProjDrefImplicitLod:
  3044. case Op::OpImageSampleProjDrefExplicitLod:
  3045. resultType = getScalarTypeId(resultType);
  3046. break;
  3047. default:
  3048. break;
  3049. }
  3050. }
  3051. Id typeId0 = 0;
  3052. Id typeId1 = 0;
  3053. if (sparse) {
  3054. typeId0 = resultType;
  3055. typeId1 = getDerefTypeId(parameters.texelOut);
  3056. resultType = makeStructResultType(typeId0, typeId1);
  3057. }
  3058. // Build the SPIR-V instruction
  3059. Instruction* textureInst = new Instruction(getUniqueId(), resultType, opCode);
  3060. textureInst->reserveOperands(optArgNum + (texArgs.size() - (optArgNum + 1)));
  3061. for (size_t op = 0; op < optArgNum; ++op)
  3062. textureInst->addIdOperand(texArgs[op]);
  3063. if (optArgNum < texArgs.size())
  3064. textureInst->addImmediateOperand(texArgs[optArgNum]);
  3065. for (size_t op = optArgNum + 1; op < texArgs.size(); ++op)
  3066. textureInst->addIdOperand(texArgs[op]);
  3067. setPrecision(textureInst->getResultId(), precision);
  3068. addInstruction(std::unique_ptr<Instruction>(textureInst));
  3069. Id resultId = textureInst->getResultId();
  3070. if (sparse) {
  3071. // set capability
  3072. addCapability(Capability::SparseResidency);
  3073. // Decode the return type that was a special structure
  3074. createStore(createCompositeExtract(resultId, typeId1, 1), parameters.texelOut);
  3075. resultId = createCompositeExtract(resultId, typeId0, 0);
  3076. setPrecision(resultId, precision);
  3077. } else {
  3078. // When a smear is needed, do it, as per what was computed
  3079. // above when resultType was changed to a scalar type.
  3080. if (resultType != smearedType)
  3081. resultId = smearScalar(precision, resultId, smearedType);
  3082. }
  3083. return resultId;
  3084. }
  3085. // Comments in header
  3086. Id Builder::createTextureQueryCall(Op opCode, const TextureParameters& parameters, bool isUnsignedResult)
  3087. {
  3088. // Figure out the result type
  3089. Id resultType = 0;
  3090. switch (opCode) {
  3091. case Op::OpImageQuerySize:
  3092. case Op::OpImageQuerySizeLod:
  3093. {
  3094. int numComponents = 0;
  3095. switch (getTypeDimensionality(getImageType(parameters.sampler))) {
  3096. case Dim::Dim1D:
  3097. case Dim::Buffer:
  3098. numComponents = 1;
  3099. break;
  3100. case Dim::Dim2D:
  3101. case Dim::Cube:
  3102. case Dim::Rect:
  3103. case Dim::SubpassData:
  3104. numComponents = 2;
  3105. break;
  3106. case Dim::Dim3D:
  3107. numComponents = 3;
  3108. break;
  3109. default:
  3110. assert(0);
  3111. break;
  3112. }
  3113. if (isArrayedImageType(getImageType(parameters.sampler)))
  3114. ++numComponents;
  3115. Id intType = isUnsignedResult ? makeUintType(32) : makeIntType(32);
  3116. if (numComponents == 1)
  3117. resultType = intType;
  3118. else
  3119. resultType = makeVectorType(intType, numComponents);
  3120. break;
  3121. }
  3122. case Op::OpImageQueryLod:
  3123. resultType = makeVectorType(getScalarTypeId(getTypeId(parameters.coords)), 2);
  3124. break;
  3125. case Op::OpImageQueryLevels:
  3126. case Op::OpImageQuerySamples:
  3127. resultType = isUnsignedResult ? makeUintType(32) : makeIntType(32);
  3128. break;
  3129. default:
  3130. assert(0);
  3131. break;
  3132. }
  3133. Instruction* query = new Instruction(getUniqueId(), resultType, opCode);
  3134. query->addIdOperand(parameters.sampler);
  3135. if (parameters.coords)
  3136. query->addIdOperand(parameters.coords);
  3137. if (parameters.lod)
  3138. query->addIdOperand(parameters.lod);
  3139. addInstruction(std::unique_ptr<Instruction>(query));
  3140. addCapability(Capability::ImageQuery);
  3141. return query->getResultId();
  3142. }
  3143. // External comments in header.
  3144. // Operates recursively to visit the composite's hierarchy.
  3145. Id Builder::createCompositeCompare(Decoration precision, Id value1, Id value2, bool equal)
  3146. {
  3147. Id boolType = makeBoolType();
  3148. Id valueType = getTypeId(value1);
  3149. Id resultId = NoResult;
  3150. int numConstituents = getNumTypeConstituents(valueType);
  3151. // Scalars and Vectors
  3152. if (isScalarType(valueType) || isVectorType(valueType)) {
  3153. assert(valueType == getTypeId(value2));
  3154. // These just need a single comparison, just have
  3155. // to figure out what it is.
  3156. Op op;
  3157. switch (getMostBasicTypeClass(valueType)) {
  3158. case Op::OpTypeFloat:
  3159. op = equal ? Op::OpFOrdEqual : Op::OpFUnordNotEqual;
  3160. break;
  3161. case Op::OpTypeInt:
  3162. default:
  3163. op = equal ? Op::OpIEqual : Op::OpINotEqual;
  3164. break;
  3165. case Op::OpTypeBool:
  3166. op = equal ? Op::OpLogicalEqual : Op::OpLogicalNotEqual;
  3167. precision = NoPrecision;
  3168. break;
  3169. }
  3170. if (isScalarType(valueType)) {
  3171. // scalar
  3172. resultId = createBinOp(op, boolType, value1, value2);
  3173. } else {
  3174. // vector
  3175. resultId = createBinOp(op, makeVectorType(boolType, numConstituents), value1, value2);
  3176. setPrecision(resultId, precision);
  3177. // reduce vector compares...
  3178. resultId = createUnaryOp(equal ? Op::OpAll : Op::OpAny, boolType, resultId);
  3179. }
  3180. return setPrecision(resultId, precision);
  3181. }
  3182. // Only structs, arrays, and matrices should be left.
  3183. // They share in common the reduction operation across their constituents.
  3184. assert(isAggregateType(valueType) || isMatrixType(valueType));
  3185. // Compare each pair of constituents
  3186. for (int constituent = 0; constituent < numConstituents; ++constituent) {
  3187. std::vector<unsigned> indexes(1, constituent);
  3188. Id constituentType1 = getContainedTypeId(getTypeId(value1), constituent);
  3189. Id constituentType2 = getContainedTypeId(getTypeId(value2), constituent);
  3190. Id constituent1 = createCompositeExtract(value1, constituentType1, indexes);
  3191. Id constituent2 = createCompositeExtract(value2, constituentType2, indexes);
  3192. Id subResultId = createCompositeCompare(precision, constituent1, constituent2, equal);
  3193. if (constituent == 0)
  3194. resultId = subResultId;
  3195. else
  3196. resultId = setPrecision(createBinOp(equal ? Op::OpLogicalAnd : Op::OpLogicalOr, boolType, resultId, subResultId),
  3197. precision);
  3198. }
  3199. return resultId;
  3200. }
  3201. // OpCompositeConstruct
  3202. Id Builder::createCompositeConstruct(Id typeId, const std::vector<Id>& constituents)
  3203. {
  3204. assert(isAggregateType(typeId) || (getNumTypeConstituents(typeId) > 1 &&
  3205. getNumTypeConstituents(typeId) == constituents.size()) ||
  3206. (isCooperativeVectorType(typeId) && constituents.size() == 1));
  3207. if (generatingOpCodeForSpecConst) {
  3208. // Sometime, even in spec-constant-op mode, the constant composite to be
  3209. // constructed may not be a specialization constant.
  3210. // e.g.:
  3211. // const mat2 m2 = mat2(a_spec_const, a_front_end_const, another_front_end_const, third_front_end_const);
  3212. // The first column vector should be a spec constant one, as a_spec_const is a spec constant.
  3213. // The second column vector should NOT be spec constant, as it does not contain any spec constants.
  3214. // To handle such cases, we check the constituents of the constant vector to determine whether this
  3215. // vector should be created as a spec constant.
  3216. return makeCompositeConstant(typeId, constituents,
  3217. std::any_of(constituents.begin(), constituents.end(),
  3218. [&](spv::Id id) { return isSpecConstant(id); }));
  3219. }
  3220. bool replicate = false;
  3221. size_t numConstituents = constituents.size();
  3222. if (useReplicatedComposites || isCooperativeVectorType(typeId)) {
  3223. replicate = numConstituents > 0 &&
  3224. std::equal(constituents.begin() + 1, constituents.end(), constituents.begin());
  3225. }
  3226. if (replicate) {
  3227. numConstituents = 1;
  3228. addCapability(spv::Capability::ReplicatedCompositesEXT);
  3229. addExtension(spv::E_SPV_EXT_replicated_composites);
  3230. }
  3231. Op opcode = replicate ? Op::OpCompositeConstructReplicateEXT : Op::OpCompositeConstruct;
  3232. Instruction* op = new Instruction(getUniqueId(), typeId, opcode);
  3233. op->reserveOperands(constituents.size());
  3234. for (size_t c = 0; c < numConstituents; ++c)
  3235. op->addIdOperand(constituents[c]);
  3236. addInstruction(std::unique_ptr<Instruction>(op));
  3237. return op->getResultId();
  3238. }
  3239. // coopmat conversion
  3240. Id Builder::createCooperativeMatrixConversion(Id typeId, Id source)
  3241. {
  3242. Instruction* op = new Instruction(getUniqueId(), typeId, Op::OpCooperativeMatrixConvertNV);
  3243. op->addIdOperand(source);
  3244. addInstruction(std::unique_ptr<Instruction>(op));
  3245. return op->getResultId();
  3246. }
  3247. // coopmat reduce
  3248. Id Builder::createCooperativeMatrixReduce(Op opcode, Id typeId, Id source, unsigned int mask, Id func)
  3249. {
  3250. Instruction* op = new Instruction(getUniqueId(), typeId, opcode);
  3251. op->addIdOperand(source);
  3252. op->addImmediateOperand(mask);
  3253. op->addIdOperand(func);
  3254. addInstruction(std::unique_ptr<Instruction>(op));
  3255. return op->getResultId();
  3256. }
  3257. // coopmat per-element operation
  3258. Id Builder::createCooperativeMatrixPerElementOp(Id typeId, const std::vector<Id>& operands)
  3259. {
  3260. Instruction* op = new Instruction(getUniqueId(), typeId, spv::Op::OpCooperativeMatrixPerElementOpNV);
  3261. // skip operand[0], which is where the result is stored
  3262. for (uint32_t i = 1; i < operands.size(); ++i) {
  3263. op->addIdOperand(operands[i]);
  3264. }
  3265. addInstruction(std::unique_ptr<Instruction>(op));
  3266. return op->getResultId();
  3267. }
  3268. // Vector or scalar constructor
  3269. Id Builder::createConstructor(Decoration precision, const std::vector<Id>& sources, Id resultTypeId)
  3270. {
  3271. Id result = NoResult;
  3272. unsigned int numTargetComponents = getNumTypeComponents(resultTypeId);
  3273. unsigned int targetComponent = 0;
  3274. // Special case: when calling a vector constructor with a single scalar
  3275. // argument, smear the scalar
  3276. if (sources.size() == 1 && isScalar(sources[0]) && (numTargetComponents > 1 || isCooperativeVectorType(resultTypeId)))
  3277. return smearScalar(precision, sources[0], resultTypeId);
  3278. // Special case: 2 vectors of equal size
  3279. if (sources.size() == 1 && isVector(sources[0]) && numTargetComponents == getNumComponents(sources[0])) {
  3280. assert(resultTypeId == getTypeId(sources[0]));
  3281. return sources[0];
  3282. }
  3283. // accumulate the arguments for OpCompositeConstruct
  3284. std::vector<Id> constituents;
  3285. Id scalarTypeId = getScalarTypeId(resultTypeId);
  3286. // lambda to store the result of visiting an argument component
  3287. const auto latchResult = [&](Id comp) {
  3288. if (numTargetComponents > 1)
  3289. constituents.push_back(comp);
  3290. else
  3291. result = comp;
  3292. ++targetComponent;
  3293. };
  3294. // lambda to visit a vector argument's components
  3295. const auto accumulateVectorConstituents = [&](Id sourceArg) {
  3296. unsigned int sourceSize = getNumComponents(sourceArg);
  3297. unsigned int sourcesToUse = sourceSize;
  3298. if (sourcesToUse + targetComponent > numTargetComponents)
  3299. sourcesToUse = numTargetComponents - targetComponent;
  3300. for (unsigned int s = 0; s < sourcesToUse; ++s) {
  3301. std::vector<unsigned> swiz;
  3302. swiz.push_back(s);
  3303. latchResult(createRvalueSwizzle(precision, scalarTypeId, sourceArg, swiz));
  3304. }
  3305. };
  3306. // lambda to visit a matrix argument's components
  3307. const auto accumulateMatrixConstituents = [&](Id sourceArg) {
  3308. unsigned int sourceSize = getNumColumns(sourceArg) * getNumRows(sourceArg);
  3309. unsigned int sourcesToUse = sourceSize;
  3310. if (sourcesToUse + targetComponent > numTargetComponents)
  3311. sourcesToUse = numTargetComponents - targetComponent;
  3312. unsigned int col = 0;
  3313. unsigned int row = 0;
  3314. for (unsigned int s = 0; s < sourcesToUse; ++s) {
  3315. if (row >= getNumRows(sourceArg)) {
  3316. row = 0;
  3317. col++;
  3318. }
  3319. std::vector<Id> indexes;
  3320. indexes.push_back(col);
  3321. indexes.push_back(row);
  3322. latchResult(createCompositeExtract(sourceArg, scalarTypeId, indexes));
  3323. row++;
  3324. }
  3325. };
  3326. // Go through the source arguments, each one could have either
  3327. // a single or multiple components to contribute.
  3328. for (unsigned int i = 0; i < sources.size(); ++i) {
  3329. if (isScalar(sources[i]) || isPointer(sources[i]))
  3330. latchResult(sources[i]);
  3331. else if (isVector(sources[i]) || isCooperativeVector(sources[i]))
  3332. accumulateVectorConstituents(sources[i]);
  3333. else if (isMatrix(sources[i]))
  3334. accumulateMatrixConstituents(sources[i]);
  3335. else
  3336. assert(0);
  3337. if (targetComponent >= numTargetComponents)
  3338. break;
  3339. }
  3340. // If the result is a vector, make it from the gathered constituents.
  3341. if (constituents.size() > 0) {
  3342. result = createCompositeConstruct(resultTypeId, constituents);
  3343. return setPrecision(result, precision);
  3344. } else {
  3345. // Precision was set when generating this component.
  3346. return result;
  3347. }
  3348. }

// Comments in header
Id Builder::createMatrixConstructor(Decoration precision, const std::vector<Id>& sources, Id resultTypeId)
{
    Id componentTypeId = getScalarTypeId(resultTypeId);
    unsigned int numCols = getTypeNumColumns(resultTypeId);
    unsigned int numRows = getTypeNumRows(resultTypeId);

    Instruction* instr = module.getInstruction(componentTypeId);
    const unsigned bitCount = instr->getImmediateOperand(0);

    // Optimize matrix constructed from a bigger matrix
    if (isMatrix(sources[0]) && getNumColumns(sources[0]) >= numCols && getNumRows(sources[0]) >= numRows) {
        // To truncate the matrix to a smaller number of rows/columns, we need to:
        // 1. For each column, extract the column and truncate it to the required size using shuffle
        // 2. Assemble the resulting matrix from all columns
        Id matrix = sources[0];
        Id columnTypeId = getContainedTypeId(resultTypeId);
        Id sourceColumnTypeId = getContainedTypeId(getTypeId(matrix));

        std::vector<unsigned> channels;
        for (unsigned int row = 0; row < numRows; ++row)
            channels.push_back(row);

        std::vector<Id> matrixColumns;
        for (unsigned int col = 0; col < numCols; ++col) {
            std::vector<unsigned> indexes;
            indexes.push_back(col);
            Id colv = createCompositeExtract(matrix, sourceColumnTypeId, indexes);
            setPrecision(colv, precision);

            if (numRows != getNumRows(matrix)) {
                matrixColumns.push_back(createRvalueSwizzle(precision, columnTypeId, colv, channels));
            } else {
                matrixColumns.push_back(colv);
            }
        }

        return setPrecision(createCompositeConstruct(resultTypeId, matrixColumns), precision);
    }

    // Detect a matrix being constructed from a repeated vector of the correct size.
    // Create the composite directly from it.
    if (sources.size() == numCols && isVector(sources[0]) && getNumComponents(sources[0]) == numRows &&
        std::equal(sources.begin() + 1, sources.end(), sources.begin())) {
        return setPrecision(createCompositeConstruct(resultTypeId, sources), precision);
    }

    // Otherwise, use a two-step process:
    // 1. make a compile-time 2D array of values
    // 2. construct a matrix from that array

    // Step 1.

    // initialize the array to the identity matrix
    Id ids[maxMatrixSize][maxMatrixSize];
    Id one  = (bitCount == 64 ? makeDoubleConstant(1.0) : makeFloatConstant(1.0));
    Id zero = (bitCount == 64 ? makeDoubleConstant(0.0) : makeFloatConstant(0.0));
    for (int col = 0; col < 4; ++col) {
        for (int row = 0; row < 4; ++row) {
            if (col == row)
                ids[col][row] = one;
            else
                ids[col][row] = zero;
        }
    }

    // modify components as dictated by the arguments
    if (sources.size() == 1 && isScalar(sources[0])) {
        // a single scalar; resets the diagonals
        for (int col = 0; col < 4; ++col)
            ids[col][col] = sources[0];
    } else if (isMatrix(sources[0])) {
        // constructing from another matrix; copy over the parts that exist in both the argument and constructee
        Id matrix = sources[0];
        unsigned int minCols = std::min(numCols, getNumColumns(matrix));
        unsigned int minRows = std::min(numRows, getNumRows(matrix));
        for (unsigned int col = 0; col < minCols; ++col) {
            std::vector<unsigned> indexes;
            indexes.push_back(col);
            for (unsigned int row = 0; row < minRows; ++row) {
                indexes.push_back(row);
                ids[col][row] = createCompositeExtract(matrix, componentTypeId, indexes);
                indexes.pop_back();
                setPrecision(ids[col][row], precision);
            }
        }
    } else {
        // fill in the matrix in column-major order with whatever argument components are available
        unsigned int row = 0;
        unsigned int col = 0;

        for (unsigned int arg = 0; arg < sources.size() && col < numCols; ++arg) {
            Id argComp = sources[arg];
            for (unsigned int comp = 0; comp < getNumComponents(sources[arg]); ++comp) {
                if (getNumComponents(sources[arg]) > 1) {
                    argComp = createCompositeExtract(sources[arg], componentTypeId, comp);
                    setPrecision(argComp, precision);
                }
                ids[col][row++] = argComp;
                if (row == numRows) {
                    row = 0;
                    col++;
                }
                if (col == numCols) {
                    // If more components are provided than fit the matrix, discard the rest.
                    break;
                }
            }
        }
    }

    // Step 2: Construct a matrix from that array.
    // First make the column vectors, then make the matrix.

    // make the column vectors
    Id columnTypeId = getContainedTypeId(resultTypeId);
    std::vector<Id> matrixColumns;
    for (unsigned int col = 0; col < numCols; ++col) {
        std::vector<Id> vectorComponents;
        for (unsigned int row = 0; row < numRows; ++row)
            vectorComponents.push_back(ids[col][row]);
        Id column = createCompositeConstruct(columnTypeId, vectorComponents);
        setPrecision(column, precision);
        matrixColumns.push_back(column);
    }

    // make the matrix
    return setPrecision(createCompositeConstruct(resultTypeId, matrixColumns), precision);
}
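
// Illustrative note (editorial sketch, not build code): the column-major fill above
// consumes argument components in order until the matrix is full. For example, a
// hypothetical mat3x2 result (numCols = 3, numRows = 2) built from (vec2 a, float b, vec3 c)
// would populate the ids[][] array as:
//
//     ids[0][0] = a.x;  ids[0][1] = a.y;   // column 0
//     ids[1][0] = b;    ids[1][1] = c.x;   // column 1
//     ids[2][0] = c.y;  ids[2][1] = c.z;   // column 2
//
// Any further components would be discarded once col reaches numCols.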

// Comments in header
Builder::If::If(Id cond, SelectionControlMask ctrl, Builder& gb) :
    builder(gb),
    condition(cond),
    control(ctrl),
    elseBlock(nullptr)
{
    function = &builder.getBuildPoint()->getParent();

    // make the blocks, but only put the then-block into the function,
    // the else-block and merge-block will be added later, in order, after
    // earlier code is emitted
    thenBlock = new Block(builder.getUniqueId(), *function);
    mergeBlock = new Block(builder.getUniqueId(), *function);

    // Save the current block, so that we can add in the flow control split when
    // makeEndIf is called.
    headerBlock = builder.getBuildPoint();
    builder.createSelectionMerge(mergeBlock, control);

    function->addBlock(thenBlock);
    builder.setBuildPoint(thenBlock);
}

// Comments in header
void Builder::If::makeBeginElse()
{
    // Close out the "then" by having it jump to the mergeBlock
    builder.createBranch(true, mergeBlock);

    // Make the first else block and add it to the function
    elseBlock = new Block(builder.getUniqueId(), *function);
    function->addBlock(elseBlock);

    // Start building the else block
    builder.setBuildPoint(elseBlock);
}

// Comments in header
void Builder::If::makeEndIf()
{
    // jump to the merge block
    builder.createBranch(true, mergeBlock);

    // Go back to the headerBlock and make the flow control split
    builder.setBuildPoint(headerBlock);
    if (elseBlock)
        builder.createConditionalBranch(condition, thenBlock, elseBlock);
    else
        builder.createConditionalBranch(condition, thenBlock, mergeBlock);

    // add the merge block to the function
    function->addBlock(mergeBlock);
    builder.setBuildPoint(mergeBlock);
}
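
// Illustrative usage of the If helper (editorial sketch, not build code; assumes a
// Builder 'builder' and a boolean 'conditionId' already exist, and that the
// SelectionControlMask::MaskNone enumerant from SPIRV-Headers is in scope):
//
//     spv::Builder::If ifBuilder(conditionId, spv::SelectionControlMask::MaskNone, builder);
//     // ... emit the "then" side at the current build point ...
//     ifBuilder.makeBeginElse();
//     // ... emit the "else" side ...
//     ifBuilder.makeEndIf();  // emits the OpBranchConditional in the saved header block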

// Comments in header
void Builder::makeSwitch(Id selector, SelectionControlMask control, int numSegments, const std::vector<int>& caseValues,
                         const std::vector<int>& valueIndexToSegment, int defaultSegment,
                         std::vector<Block*>& segmentBlocks)
{
    Function& function = buildPoint->getParent();

    // make all the blocks
    for (int s = 0; s < numSegments; ++s)
        segmentBlocks.push_back(new Block(getUniqueId(), function));

    Block* mergeBlock = new Block(getUniqueId(), function);

    // make and insert the switch's selection-merge instruction
    createSelectionMerge(mergeBlock, control);

    // make the switch instruction
    Instruction* switchInst = new Instruction(NoResult, NoType, Op::OpSwitch);
    switchInst->reserveOperands((caseValues.size() * 2) + 2);
    switchInst->addIdOperand(selector);
    auto defaultOrMerge = (defaultSegment >= 0) ? segmentBlocks[defaultSegment] : mergeBlock;
    switchInst->addIdOperand(defaultOrMerge->getId());
    defaultOrMerge->addPredecessor(buildPoint);
    for (int i = 0; i < (int)caseValues.size(); ++i) {
        switchInst->addImmediateOperand(caseValues[i]);
        switchInst->addIdOperand(segmentBlocks[valueIndexToSegment[i]]->getId());
        segmentBlocks[valueIndexToSegment[i]]->addPredecessor(buildPoint);
    }
    addInstruction(std::unique_ptr<Instruction>(switchInst));

    // push the merge block
    switchMerges.push(mergeBlock);
}

// Comments in header
void Builder::addSwitchBreak(bool implicit)
{
    // branch to the top of the merge block stack
    createBranch(implicit, switchMerges.top());
    createAndSetNoPredecessorBlock("post-switch-break");
}

// Comments in header
void Builder::nextSwitchSegment(std::vector<Block*>& segmentBlock, int nextSegment)
{
    int lastSegment = nextSegment - 1;
    if (lastSegment >= 0) {
        // Close out previous segment by jumping, if necessary, to next segment
        if (! buildPoint->isTerminated())
            createBranch(true, segmentBlock[nextSegment]);
    }
    Block* block = segmentBlock[nextSegment];
    block->getParent().addBlock(block);
    setBuildPoint(block);
}

// Comments in header
void Builder::endSwitch(std::vector<Block*>& /*segmentBlock*/)
{
    // Close out previous segment by jumping, if necessary, to next segment
    if (! buildPoint->isTerminated())
        addSwitchBreak(true);

    switchMerges.top()->getParent().addBlock(switchMerges.top());
    setBuildPoint(switchMerges.top());

    switchMerges.pop();
}
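
// Illustrative call sequence for the switch helpers (editorial sketch, not build code;
// 'builder', 'selector', and the case bookkeeping vectors are assumed to exist):
//
//     std::vector<spv::Block*> segments;
//     builder.makeSwitch(selector, spv::SelectionControlMask::MaskNone, numSegments,
//                        caseValues, valueIndexToSegment, defaultSegment, segments);
//     for (int s = 0; s < numSegments; ++s) {
//         builder.nextSwitchSegment(segments, s);
//         // ... emit the body of segment s ...
//         builder.addSwitchBreak(false);   // explicit "break"
//     }
//     builder.endSwitch(segments);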

Block& Builder::makeNewBlock()
{
    Function& function = buildPoint->getParent();
    auto block = new Block(getUniqueId(), function);
    function.addBlock(block);
    return *block;
}

Builder::LoopBlocks& Builder::makeNewLoop()
{
    // This verbosity is needed to simultaneously get the same behavior
    // everywhere (id's in the same order), have a syntax that works
    // across lots of versions of C++, have no warnings from pedantic
    // compilation modes, and leave the rest of the code alone.
    Block& head            = makeNewBlock();
    Block& body            = makeNewBlock();
    Block& merge           = makeNewBlock();
    Block& continue_target = makeNewBlock();
    LoopBlocks blocks(head, body, merge, continue_target);
    loops.push(blocks);
    return loops.top();
}

void Builder::createLoopContinue()
{
    createBranch(false, &loops.top().continue_target);
    // Set up a block for dead code.
    createAndSetNoPredecessorBlock("post-loop-continue");
}

void Builder::createLoopExit()
{
    createBranch(false, &loops.top().merge);
    // Set up a block for dead code.
    createAndSetNoPredecessorBlock("post-loop-break");
}

void Builder::closeLoop()
{
    loops.pop();
}
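
// Illustrative loop-emission sequence (editorial sketch, not build code; the 'merge'
// and 'continue_target' member names appear above, while 'head' and 'body' are
// assumed from the LoopBlocks constructor arguments):
//
//     spv::Builder::LoopBlocks& blocks = builder.makeNewLoop();
//     builder.createBranch(true, &blocks.head);            // enter the loop header
//     builder.setBuildPoint(&blocks.head);
//     builder.createLoopMerge(&blocks.merge, &blocks.continue_target,
//                             spv::LoopControlMask::MaskNone, {});
//     // ... emit the condition and branch to &blocks.body or &blocks.merge ...
//     // inside the body, createLoopContinue() / createLoopExit() as needed
//     builder.closeLoop();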

void Builder::clearAccessChain()
{
    accessChain.base = NoResult;
    accessChain.indexChain.clear();
    accessChain.instr = NoResult;
    accessChain.swizzle.clear();
    accessChain.component = NoResult;
    accessChain.preSwizzleBaseType = NoType;
    accessChain.isRValue = false;
    accessChain.coherentFlags.clear();
    accessChain.alignment = 0;
}

// Comments in header
void Builder::accessChainPushSwizzle(std::vector<unsigned>& swizzle, Id preSwizzleBaseType,
    AccessChain::CoherentFlags coherentFlags, unsigned int alignment)
{
    accessChain.coherentFlags |= coherentFlags;
    accessChain.alignment |= alignment;

    // swizzles can be stacked in GLSL, but simplified to a single
    // one here; the base type doesn't change
    if (accessChain.preSwizzleBaseType == NoType)
        accessChain.preSwizzleBaseType = preSwizzleBaseType;

    // if needed, propagate the swizzle for the current access chain
    if (accessChain.swizzle.size() > 0) {
        std::vector<unsigned> oldSwizzle = accessChain.swizzle;
        accessChain.swizzle.resize(0);
        for (unsigned int i = 0; i < swizzle.size(); ++i) {
            assert(swizzle[i] < oldSwizzle.size());
            accessChain.swizzle.push_back(oldSwizzle[swizzle[i]]);
        }
    } else
        accessChain.swizzle = swizzle;

    // determine if we need to track this swizzle anymore
    simplifyAccessChainSwizzle();
}
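
// Illustrative composition (editorial sketch): for GLSL 'v.zyxw.yx', the first push
// records {2,1,0,3}; pushing {1,0} on top then yields oldSwizzle[1], oldSwizzle[0],
// i.e. {1,2} -- the same selection as writing 'v.yz' directly.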

// Comments in header
void Builder::accessChainStore(Id rvalue, Decoration nonUniform, spv::MemoryAccessMask memoryAccess, spv::Scope scope, unsigned int alignment)
{
    assert(accessChain.isRValue == false);

    transferAccessChainSwizzle(true);

    // If a swizzle exists and is not full and is not dynamic, then the swizzle will be broken into individual stores.
    if (accessChain.swizzle.size() > 0 &&
        getNumTypeComponents(getResultingAccessChainType()) != accessChain.swizzle.size() &&
        accessChain.component == NoResult) {
        for (unsigned int i = 0; i < accessChain.swizzle.size(); ++i) {
            accessChain.indexChain.push_back(makeUintConstant(accessChain.swizzle[i]));
            accessChain.instr = NoResult;

            Id base = collapseAccessChain();
            addDecoration(base, nonUniform);

            accessChain.indexChain.pop_back();
            accessChain.instr = NoResult;

            // dynamic component should be gone
            assert(accessChain.component == NoResult);

            Id source = createCompositeExtract(rvalue, getContainedTypeId(getTypeId(rvalue)), i);

            // take LSB of alignment
            alignment = alignment & ~(alignment & (alignment-1));
            if (getStorageClass(base) == StorageClass::PhysicalStorageBufferEXT) {
                memoryAccess = (spv::MemoryAccessMask)(memoryAccess | spv::MemoryAccessMask::Aligned);
            }

            createStore(source, base, memoryAccess, scope, alignment);
        }
    }
    else {
        Id base = collapseAccessChain();
        addDecoration(base, nonUniform);

        Id source = rvalue;

        // dynamic component should be gone
        assert(accessChain.component == NoResult);

        // If a swizzle still exists, it may be out-of-order, so we must load the target vector,
        // then extract and insert elements to perform the writeMask and/or swizzle.
        if (accessChain.swizzle.size() > 0) {
            Id tempBaseId = createLoad(base, spv::NoPrecision);
            source = createLvalueSwizzle(getTypeId(tempBaseId), tempBaseId, source, accessChain.swizzle);
        }

        // take LSB of alignment
        alignment = alignment & ~(alignment & (alignment-1));
        if (getStorageClass(base) == StorageClass::PhysicalStorageBufferEXT) {
            memoryAccess = (spv::MemoryAccessMask)(memoryAccess | spv::MemoryAccessMask::Aligned);
        }

        createStore(source, base, memoryAccess, scope, alignment);
    }
}
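
// Illustrative decomposition (editorial sketch): for a partial, static writeMask such as
// GLSL 'v.zx = rhs', the swizzle is {2,0}, so the loop above emits two scalar stores:
// rhs[0] into v[2] and rhs[1] into v[0], each through its own collapsed access chain.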

// Comments in header
Id Builder::accessChainLoad(Decoration precision, Decoration l_nonUniform,
    Decoration r_nonUniform, Id resultType, spv::MemoryAccessMask memoryAccess,
    spv::Scope scope, unsigned int alignment)
{
    Id id;

    if (accessChain.isRValue) {
        // transfer access chain, but try to stay in registers
        transferAccessChainSwizzle(false);
        if (accessChain.indexChain.size() > 0) {
            Id swizzleBase = accessChain.preSwizzleBaseType != NoType ? accessChain.preSwizzleBaseType : resultType;

            // if all the accesses are constants, we can use OpCompositeExtract
            std::vector<unsigned> indexes;
            bool constant = true;
            for (int i = 0; i < (int)accessChain.indexChain.size(); ++i) {
                if (isConstantScalar(accessChain.indexChain[i]))
                    indexes.push_back(getConstantScalar(accessChain.indexChain[i]));
                else {
                    constant = false;
                    break;
                }
            }

            if (constant) {
                id = createCompositeExtract(accessChain.base, swizzleBase, indexes);
                setPrecision(id, precision);
            } else if (isCooperativeVector(accessChain.base)) {
                assert(accessChain.indexChain.size() == 1);
                id = createVectorExtractDynamic(accessChain.base, resultType, accessChain.indexChain[0]);
            } else {
                Id lValue = NoResult;
                if (spvVersion >= Spv_1_4 && isValidInitializer(accessChain.base)) {
                    // make a new function variable for this r-value, using an initializer,
                    // and mark it as NonWritable so that downstream it can be detected as a lookup
                    // table
                    lValue = createVariable(NoPrecision, StorageClass::Function, getTypeId(accessChain.base),
                                            "indexable", accessChain.base);
                    addDecoration(lValue, Decoration::NonWritable);
                } else {
                    lValue = createVariable(NoPrecision, StorageClass::Function, getTypeId(accessChain.base),
                                            "indexable");
                    // store into it
                    createStore(accessChain.base, lValue);
                }
                // move base to the new variable
                accessChain.base = lValue;
                accessChain.isRValue = false;

                // load through the access chain
                id = createLoad(collapseAccessChain(), precision);
            }
        } else
            id = accessChain.base;  // no precision, it was set when this was defined
    } else {
        transferAccessChainSwizzle(true);

        // take LSB of alignment
        alignment = alignment & ~(alignment & (alignment-1));
        if (getStorageClass(accessChain.base) == StorageClass::PhysicalStorageBufferEXT) {
            memoryAccess = (spv::MemoryAccessMask)(memoryAccess | spv::MemoryAccessMask::Aligned);
        }

        // load through the access chain
        id = collapseAccessChain();

        // Apply nonuniform both to the access chain and the loaded value.
        // Buffer accesses need the access chain decorated, and this is where
        // loaded image types get decorated. TODO: This should maybe move to
        // createImageTextureFunctionCall.
        addDecoration(id, l_nonUniform);
        id = createLoad(id, precision, memoryAccess, scope, alignment);
        addDecoration(id, r_nonUniform);
    }

    // Done, unless there are swizzles to do
    if (accessChain.swizzle.size() == 0 && accessChain.component == NoResult)
        return id;

    // Do remaining swizzling

    // Do the basic swizzle
    if (accessChain.swizzle.size() > 0) {
        Id swizzledType = getScalarTypeId(getTypeId(id));
        if (accessChain.swizzle.size() > 1)
            swizzledType = makeVectorType(swizzledType, (int)accessChain.swizzle.size());
        id = createRvalueSwizzle(precision, swizzledType, id, accessChain.swizzle);
    }

    // Do the dynamic component
    if (accessChain.component != NoResult)
        id = setPrecision(createVectorExtractDynamic(id, resultType, accessChain.component), precision);

    addDecoration(id, r_nonUniform);
    return id;
}
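
// Illustrative note (editorial sketch): for an r-value base indexed only by constants
// (e.g. a constant array indexed by a literal), the path above folds the access into
// OpCompositeExtract; a non-constant index instead spills the r-value into a
// Function-storage "indexable" variable so a normal access chain and load can be generated.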

Id Builder::accessChainGetLValue()
{
    assert(accessChain.isRValue == false);

    transferAccessChainSwizzle(true);
    Id lvalue = collapseAccessChain();

    // If a swizzle exists, it is out-of-order or not full, so we would have to load the
    // target vector and extract/insert elements to perform the writeMask and/or swizzle.
    // That does not go with getting a direct l-value pointer.
    assert(accessChain.swizzle.size() == 0);
    assert(accessChain.component == NoResult);

    return lvalue;
}

// comment in header
Id Builder::accessChainGetInferredType()
{
    // anything to operate on?
    if (accessChain.base == NoResult)
        return NoType;
    Id type = getTypeId(accessChain.base);

    // do initial dereference
    if (! accessChain.isRValue)
        type = getContainedTypeId(type);

    // dereference each index
    for (auto it = accessChain.indexChain.cbegin(); it != accessChain.indexChain.cend(); ++it) {
        if (isStructType(type))
            type = getContainedTypeId(type, getConstantScalar(*it));
        else
            type = getContainedTypeId(type);
    }

    // dereference swizzle
    if (accessChain.swizzle.size() == 1)
        type = getContainedTypeId(type);
    else if (accessChain.swizzle.size() > 1)
        type = makeVectorType(getContainedTypeId(type), (int)accessChain.swizzle.size());

    // dereference component selection
    if (accessChain.component)
        type = getContainedTypeId(type);

    return type;
}

void Builder::dump(std::vector<unsigned int>& out) const
{
    // Header, before first instructions:
    out.push_back(MagicNumber);
    out.push_back(spvVersion);
    out.push_back(builderNumber);
    out.push_back(uniqueId + 1);
    out.push_back(0);

    // Capabilities
    for (auto it = capabilities.cbegin(); it != capabilities.cend(); ++it) {
        Instruction capInst(0, 0, Op::OpCapability);
        capInst.addImmediateOperand(*it);
        capInst.dump(out);
    }

    for (auto it = extensions.cbegin(); it != extensions.cend(); ++it) {
        Instruction extInst(0, 0, Op::OpExtension);
        extInst.addStringOperand(it->c_str());
        extInst.dump(out);
    }

    dumpInstructions(out, imports);
    Instruction memInst(0, 0, Op::OpMemoryModel);
    memInst.addImmediateOperand(addressModel);
    memInst.addImmediateOperand(memoryModel);
    memInst.dump(out);

    // Instructions saved up while building:
    dumpInstructions(out, entryPoints);
    dumpInstructions(out, executionModes);

    // Debug instructions
    dumpInstructions(out, strings);
    dumpSourceInstructions(out);
    for (int e = 0; e < (int)sourceExtensions.size(); ++e) {
        Instruction sourceExtInst(0, 0, Op::OpSourceExtension);
        sourceExtInst.addStringOperand(sourceExtensions[e]);
        sourceExtInst.dump(out);
    }
    dumpInstructions(out, names);
    dumpModuleProcesses(out);

    // Annotation instructions
    dumpInstructions(out, decorations);

    dumpInstructions(out, constantsTypesGlobals);
    dumpInstructions(out, externals);

    // The functions
    module.dump(out);
}
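
// Note (editorial): the five words pushed first correspond to the standard SPIR-V
// module header -- magic number, version, generator magic (builderNumber),
// id bound (largest id issued + 1), and the reserved schema word (0).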

//
// Protected methods.
//

// Turn the described access chain in 'accessChain' into an instruction(s)
// computing its address. This *cannot* include complex swizzles, which must
// be handled after this is called.
//
// Can generate code.
Id Builder::collapseAccessChain()
{
    assert(accessChain.isRValue == false);

    // did we already emit an access chain for this?
    if (accessChain.instr != NoResult)
        return accessChain.instr;

    // If we have a dynamic component, we can still transfer
    // that into a final operand to the access chain. We need to remap the
    // dynamic component through the swizzle to get a new dynamic component to
    // update.
    //
    // This was not done in transferAccessChainSwizzle() because it might
    // generate code.
    remapDynamicSwizzle();
    if (accessChain.component != NoResult) {
        // transfer the dynamic component to the access chain
        accessChain.indexChain.push_back(accessChain.component);
        accessChain.component = NoResult;
    }

    // note that non-trivial swizzling is left pending

    // do we have an access chain?
    if (accessChain.indexChain.size() == 0)
        return accessChain.base;

    // emit the access chain
    StorageClass storageClass = (StorageClass)module.getStorageClass(getTypeId(accessChain.base));
    accessChain.instr = createAccessChain(storageClass, accessChain.base, accessChain.indexChain);

    return accessChain.instr;
}

// For a dynamic component selection of a swizzle.
//
// Turn the swizzle and dynamic component into just a dynamic component.
//
// Generates code.
void Builder::remapDynamicSwizzle()
{
    // do we have a swizzle to remap a dynamic component through?
    if (accessChain.component != NoResult && accessChain.swizzle.size() > 1) {
        // build a vector of the swizzle for the component to map into
        std::vector<Id> components;
        for (int c = 0; c < (int)accessChain.swizzle.size(); ++c)
            components.push_back(makeUintConstant(accessChain.swizzle[c]));
        Id mapType = makeVectorType(makeUintType(32), (int)accessChain.swizzle.size());
        Id map = makeCompositeConstant(mapType, components);

        // use it
        accessChain.component = createVectorExtractDynamic(map, makeUintType(32), accessChain.component);
        accessChain.swizzle.clear();
    }
}
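
// Illustrative example (editorial sketch): for 'v.zw[i]' the swizzle is {2,3} and the
// dynamic component is 'i'; the code above builds the constant uvec2 map (2,3) and
// replaces the component with map[i], so the later access chain indexes v directly.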

// clear out swizzle if it is redundant, that is, reselecting the same components
// that would be present without the swizzle.
void Builder::simplifyAccessChainSwizzle()
{
    // If the swizzle has fewer components than the vector, it is subsetting, and must stay
    // to preserve that fact.
    if (getNumTypeComponents(accessChain.preSwizzleBaseType) > accessChain.swizzle.size())
        return;

    // if components are out of order, it is a swizzle
    for (unsigned int i = 0; i < accessChain.swizzle.size(); ++i) {
        if (i != accessChain.swizzle[i])
            return;
    }

    // otherwise, there is no need to track this swizzle
    accessChain.swizzle.clear();
    if (accessChain.component == NoResult)
        accessChain.preSwizzleBaseType = NoType;
}
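
// Illustrative example (editorial sketch): '.xyzw' applied to a vec4 gives the identity
// swizzle {0,1,2,3} and is dropped here, while '.xy' on a vec4 subsets the vector and
// is kept so later loads/stores still narrow to two components.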

// To the extent any swizzling can become part of the chain
// of accesses instead of a post operation, make it so.
// If 'dynamic' is true, include transferring the dynamic component,
// otherwise, leave it pending.
//
// Does not generate code; just updates the access chain.
void Builder::transferAccessChainSwizzle(bool dynamic)
{
    // nonexistent?
    if (accessChain.swizzle.size() == 0 && accessChain.component == NoResult)
        return;

    // too complex?
    // (this requires either a swizzle, or generating code for a dynamic component)
    if (accessChain.swizzle.size() > 1)
        return;

    // single component, either in the swizzle and/or dynamic component
    if (accessChain.swizzle.size() == 1) {
        assert(accessChain.component == NoResult);
        // handle static component selection
        accessChain.indexChain.push_back(makeUintConstant(accessChain.swizzle.front()));
        accessChain.swizzle.clear();
        accessChain.preSwizzleBaseType = NoType;
    } else if (dynamic && accessChain.component != NoResult) {
        assert(accessChain.swizzle.size() == 0);
        // handle dynamic component
        accessChain.indexChain.push_back(accessChain.component);
        accessChain.preSwizzleBaseType = NoType;
        accessChain.component = NoResult;
    }
}
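
// Illustrative example (editorial sketch): a single-component selection such as '.y'
// becomes a constant index (1) appended to indexChain here, while multi-component
// swizzles are left pending for accessChainLoad/accessChainStore to resolve.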

// Utility method for creating a new block and setting the insert point to
// be in it. This is useful for flow-control operations that need a "dummy"
// block following them (e.g., instructions after a discard, etc.).
void Builder::createAndSetNoPredecessorBlock(const char* /*name*/)
{
    Block* block = new Block(getUniqueId(), buildPoint->getParent());
    block->setUnreachable();
    buildPoint->getParent().addBlock(block);
    setBuildPoint(block);

    // if (name)
    //     addName(block->getId(), name);
}

// Comments in header
void Builder::createBranch(bool implicit, Block* block)
{
    Instruction* branch = new Instruction(Op::OpBranch);
    branch->addIdOperand(block->getId());
    if (implicit) {
        addInstructionNoDebugInfo(std::unique_ptr<Instruction>(branch));
    }
    else {
        addInstruction(std::unique_ptr<Instruction>(branch));
    }
    block->addPredecessor(buildPoint);
}

void Builder::createSelectionMerge(Block* mergeBlock, SelectionControlMask control)
{
    Instruction* merge = new Instruction(Op::OpSelectionMerge);
    merge->reserveOperands(2);
    merge->addIdOperand(mergeBlock->getId());
    merge->addImmediateOperand(control);
    addInstruction(std::unique_ptr<Instruction>(merge));
}

void Builder::createLoopMerge(Block* mergeBlock, Block* continueBlock, LoopControlMask control,
                              const std::vector<unsigned int>& operands)
{
    Instruction* merge = new Instruction(Op::OpLoopMerge);
    merge->reserveOperands(operands.size() + 3);
    merge->addIdOperand(mergeBlock->getId());
    merge->addIdOperand(continueBlock->getId());
    merge->addImmediateOperand(control);
    for (int op = 0; op < (int)operands.size(); ++op)
        merge->addImmediateOperand(operands[op]);
    addInstruction(std::unique_ptr<Instruction>(merge));
}

void Builder::createConditionalBranch(Id condition, Block* thenBlock, Block* elseBlock)
{
    Instruction* branch = new Instruction(Op::OpBranchConditional);
    branch->reserveOperands(3);
    branch->addIdOperand(condition);
    branch->addIdOperand(thenBlock->getId());
    branch->addIdOperand(elseBlock->getId());

    // A conditional branch is always attached to a condition expression
    addInstructionNoDebugInfo(std::unique_ptr<Instruction>(branch));

    thenBlock->addPredecessor(buildPoint);
    elseBlock->addPredecessor(buildPoint);
}

// OpSource
// [OpSourceContinued]
// ...
void Builder::dumpSourceInstructions(const spv::Id fileId, const std::string& text,
                                     std::vector<unsigned int>& out) const
{
    const int maxWordCount = 0xFFFF;
    const int opSourceWordCount = 4;
    const int nonNullBytesPerInstruction = 4 * (maxWordCount - opSourceWordCount) - 1;

    if (sourceLang != SourceLanguage::Unknown) {
        // OpSource Language Version File Source
        Instruction sourceInst(NoResult, NoType, Op::OpSource);
        sourceInst.reserveOperands(3);
        sourceInst.addImmediateOperand(sourceLang);
        sourceInst.addImmediateOperand(sourceVersion);
        // File operand
        if (fileId != NoResult) {
            sourceInst.addIdOperand(fileId);
            // Source operand
            if (text.size() > 0) {
                int nextByte = 0;
                std::string subString;
                while ((int)text.size() - nextByte > 0) {
                    subString = text.substr(nextByte, nonNullBytesPerInstruction);
                    if (nextByte == 0) {
                        // OpSource
                        sourceInst.addStringOperand(subString.c_str());
                        sourceInst.dump(out);
                    } else {
                        // OpSourceContinued
                        Instruction sourceContinuedInst(Op::OpSourceContinued);
                        sourceContinuedInst.addStringOperand(subString.c_str());
                        sourceContinuedInst.dump(out);
                    }
                    nextByte += nonNullBytesPerInstruction;
                }
            } else
                sourceInst.dump(out);
        } else
            sourceInst.dump(out);
    }
}
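
// Note (editorial): with maxWordCount == 0xFFFF and an OpSource overhead of 4 words,
// nonNullBytesPerInstruction = 4 * (0xFFFF - 4) - 1 = 262123, so source text longer
// than that is split across OpSourceContinued instructions by the loop above.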

// Dump an OpSource[Continued] sequence for the source and every include file
void Builder::dumpSourceInstructions(std::vector<unsigned int>& out) const
{
    if (emitNonSemanticShaderDebugInfo) return;
    dumpSourceInstructions(mainFileId, sourceText, out);
    for (auto iItr = includeFiles.begin(); iItr != includeFiles.end(); ++iItr)
        dumpSourceInstructions(iItr->first, *iItr->second, out);
}

template <class Range> void Builder::dumpInstructions(std::vector<unsigned int>& out, const Range& instructions) const
{
    for (const auto& inst : instructions) {
        inst->dump(out);
    }
}

void Builder::dumpModuleProcesses(std::vector<unsigned int>& out) const
{
    for (int i = 0; i < (int)moduleProcesses.size(); ++i) {
        Instruction moduleProcessed(Op::OpModuleProcessed);
        moduleProcessed.addStringOperand(moduleProcesses[i]);
        moduleProcessed.dump(out);
    }
}

bool Builder::DecorationInstructionLessThan::operator()(const std::unique_ptr<Instruction>& lhs,
                                                        const std::unique_ptr<Instruction>& rhs) const
{
    // Order by the id to which the decoration applies first. This is more intuitive.
    assert(lhs->isIdOperand(0) && rhs->isIdOperand(0));
    if (lhs->getIdOperand(0) != rhs->getIdOperand(0)) {
        return lhs->getIdOperand(0) < rhs->getIdOperand(0);
    }

    if (lhs->getOpCode() != rhs->getOpCode())
        return lhs->getOpCode() < rhs->getOpCode();

    // Now compare the operands.
    int minSize = std::min(lhs->getNumOperands(), rhs->getNumOperands());
    for (int i = 1; i < minSize; ++i) {
        if (lhs->isIdOperand(i) != rhs->isIdOperand(i)) {
            return lhs->isIdOperand(i) < rhs->isIdOperand(i);
        }

        if (lhs->isIdOperand(i)) {
            if (lhs->getIdOperand(i) != rhs->getIdOperand(i)) {
                return lhs->getIdOperand(i) < rhs->getIdOperand(i);
            }
        } else {
            if (lhs->getImmediateOperand(i) != rhs->getImmediateOperand(i)) {
                return lhs->getImmediateOperand(i) < rhs->getImmediateOperand(i);
            }
        }
    }

    if (lhs->getNumOperands() != rhs->getNumOperands())
        return lhs->getNumOperands() < rhs->getNumOperands();

    // In this case they are equal.
    return false;
}

} // end spv namespace