SpvBuilder.cpp 187 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
7477847794780478147824783478447854786478747884789479047914792479347944795479647974798479948004801480248034804480548064807480848094810481148124813481448154816481748184819482048214822482348244825482648274828482948304831483248334834483548364837483848394840484148424843484448454846484748484849485048514852485348544855485648574858485948604861486248634864486548664867486848694870487148724873487448754876487748784879488048814882488348844885488648874888488948904891489248934894489548964897489848994900490149024903490449054906490749084909491049114912491349144915491649174918491949204921492249234924492549264927492849294930493149324933493449354936493749384939494049414942494349444945494649474948494949504951495249534954495549564957495849594960496149624963496449654966496749684969497049714972497349744975497649774978497949804981498249834984498549864987498849894990499149924993499449954996499749984999500050015002500350045005500650075008500950105011501250135014501550165017501850195020502150225023502450255026502750285029503050315032503350345035503650375038503950405041504250435044504550465047504850495050505150525053505450555056505750585059506050615062506350645065506650675068506950705071507250735074507550765077507850795080508150825083508450855086508750885089509050915092509350945095509650975098509951005101510251035104510551065107510851095110511151125113511451155116511751185119512051215122512351245125
  1. //
  2. // Copyright (C) 2014-2015 LunarG, Inc.
  3. // Copyright (C) 2015-2018 Google, Inc.
  4. // Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights reserved.
  5. //
  6. // All rights reserved.
  7. //
  8. // Redistribution and use in source and binary forms, with or without
  9. // modification, are permitted provided that the following conditions
  10. // are met:
  11. //
  12. // Redistributions of source code must retain the above copyright
  13. // notice, this list of conditions and the following disclaimer.
  14. //
  15. // Redistributions in binary form must reproduce the above
  16. // copyright notice, this list of conditions and the following
  17. // disclaimer in the documentation and/or other materials provided
  18. // with the distribution.
  19. //
  20. // Neither the name of 3Dlabs Inc. Ltd. nor the names of its
  21. // contributors may be used to endorse or promote products derived
  22. // from this software without specific prior written permission.
  23. //
  24. // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
  25. // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
  26. // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
  27. // FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
  28. // COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
  29. // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
  30. // BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
  31. // LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
  32. // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
  33. // LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
  34. // ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
  35. // POSSIBILITY OF SUCH DAMAGE.
  36. //
  37. // Helper for making SPIR-V IR. Generally, this is documented in the header
  38. // SpvBuilder.h.
  39. //
  40. #include <cassert>
  41. #include <cstdlib>
  42. #include <unordered_set>
  43. #include <algorithm>
  44. #include "SpvBuilder.h"
  45. #include "spvUtil.h"
  46. #include "hex_float.h"
  47. #ifndef _WIN32
  48. #include <cstdio>
  49. #endif
  50. namespace spv {
  51. Builder::Builder(unsigned int spvVersion, unsigned int magicNumber, SpvBuildLogger* buildLogger) :
  52. spvVersion(spvVersion),
  53. sourceLang(SourceLanguage::Unknown),
  54. sourceVersion(0),
  55. addressModel(AddressingModel::Logical),
  56. memoryModel(MemoryModel::GLSL450),
  57. builderNumber(magicNumber),
  58. buildPoint(nullptr),
  59. uniqueId(0),
  60. entryPointFunction(nullptr),
  61. generatingOpCodeForSpecConst(false),
  62. logger(buildLogger)
  63. {
  64. clearAccessChain();
  65. }
  66. Builder::~Builder()
  67. {
  68. }
  69. Id Builder::import(const char* name)
  70. {
  71. Instruction* import = new Instruction(getUniqueId(), NoType, Op::OpExtInstImport);
  72. import->addStringOperand(name);
  73. module.mapInstruction(import);
  74. imports.push_back(std::unique_ptr<Instruction>(import));
  75. return import->getResultId();
  76. }
  77. // For creating new groupedTypes (will return old type if the requested one was already made).
  78. Id Builder::makeVoidType()
  79. {
  80. Instruction* type;
  81. if (groupedTypes[enumCast(Op::OpTypeVoid)].size() == 0) {
  82. Id typeId = getUniqueId();
  83. type = new Instruction(typeId, NoType, Op::OpTypeVoid);
  84. groupedTypes[enumCast(Op::OpTypeVoid)].push_back(type);
  85. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  86. module.mapInstruction(type);
  87. // Core OpTypeVoid used for debug void type
  88. if (emitNonSemanticShaderDebugInfo)
  89. debugTypeIdLookup[typeId] = typeId;
  90. } else
  91. type = groupedTypes[enumCast(Op::OpTypeVoid)].back();
  92. return type->getResultId();
  93. }
  94. Id Builder::makeBoolType()
  95. {
  96. Instruction* type;
  97. if (groupedTypes[enumCast(Op::OpTypeBool)].size() == 0) {
  98. type = new Instruction(getUniqueId(), NoType, Op::OpTypeBool);
  99. groupedTypes[enumCast(Op::OpTypeBool)].push_back(type);
  100. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  101. module.mapInstruction(type);
  102. if (emitNonSemanticShaderDebugInfo) {
  103. auto const debugResultId = makeBoolDebugType(32);
  104. debugTypeIdLookup[type->getResultId()] = debugResultId;
  105. }
  106. } else
  107. type = groupedTypes[enumCast(Op::OpTypeBool)].back();
  108. return type->getResultId();
  109. }
  110. Id Builder::makeSamplerType(const char* debugName)
  111. {
  112. Instruction* type;
  113. if (groupedTypes[enumCast(Op::OpTypeSampler)].size() == 0) {
  114. type = new Instruction(getUniqueId(), NoType, Op::OpTypeSampler);
  115. groupedTypes[enumCast(Op::OpTypeSampler)].push_back(type);
  116. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  117. module.mapInstruction(type);
  118. } else
  119. type = groupedTypes[enumCast(Op::OpTypeSampler)].back();
  120. if (emitNonSemanticShaderDebugInfo)
  121. {
  122. auto const debugResultId = makeOpaqueDebugType(debugName);
  123. debugTypeIdLookup[type->getResultId()] = debugResultId;
  124. }
  125. return type->getResultId();
  126. }
  127. Id Builder::makePointer(StorageClass storageClass, Id pointee)
  128. {
  129. // try to find it
  130. Instruction* type;
  131. for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypePointer)].size(); ++t) {
  132. type = groupedTypes[enumCast(Op::OpTypePointer)][t];
  133. if (type->getImmediateOperand(0) == (unsigned)storageClass &&
  134. type->getIdOperand(1) == pointee)
  135. return type->getResultId();
  136. }
  137. // not found, make it
  138. type = new Instruction(getUniqueId(), NoType, Op::OpTypePointer);
  139. type->reserveOperands(2);
  140. type->addImmediateOperand(storageClass);
  141. type->addIdOperand(pointee);
  142. groupedTypes[enumCast(Op::OpTypePointer)].push_back(type);
  143. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  144. module.mapInstruction(type);
  145. if (emitNonSemanticShaderDebugInfo) {
  146. const Id debugResultId = makePointerDebugType(storageClass, pointee);
  147. debugTypeIdLookup[type->getResultId()] = debugResultId;
  148. }
  149. return type->getResultId();
  150. }
  151. Id Builder::makeForwardPointer(StorageClass storageClass)
  152. {
  153. // Caching/uniquifying doesn't work here, because we don't know the
  154. // pointee type and there can be multiple forward pointers of the same
  155. // storage type. Somebody higher up in the stack must keep track.
  156. Instruction* type = new Instruction(getUniqueId(), NoType, Op::OpTypeForwardPointer);
  157. type->addImmediateOperand(storageClass);
  158. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  159. module.mapInstruction(type);
  160. if (emitNonSemanticShaderDebugInfo) {
  161. const Id debugResultId = makeForwardPointerDebugType(storageClass);
  162. debugTypeIdLookup[type->getResultId()] = debugResultId;
  163. }
  164. return type->getResultId();
  165. }
  166. Id Builder::makeUntypedPointer(StorageClass storageClass, bool setBufferPointer)
  167. {
  168. // try to find it
  169. Instruction* type;
  170. // both typeBufferEXT and UntypedPointer only contains storage class info.
  171. spv::Op typeOp = setBufferPointer ? Op::OpTypeBufferEXT : Op::OpTypeUntypedPointerKHR;
  172. for (int t = 0; t < (int)groupedTypes[enumCast(typeOp)].size(); ++t) {
  173. type = groupedTypes[enumCast(typeOp)][t];
  174. if (type->getImmediateOperand(0) == (unsigned)storageClass)
  175. return type->getResultId();
  176. }
  177. // not found, make it
  178. type = new Instruction(getUniqueId(), NoType, typeOp);
  179. type->addImmediateOperand(storageClass);
  180. groupedTypes[enumCast(typeOp)].push_back(type);
  181. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  182. module.mapInstruction(type);
  183. return type->getResultId();
  184. }
  185. Id Builder::makePointerFromForwardPointer(StorageClass storageClass, Id forwardPointerType, Id pointee)
  186. {
  187. // try to find it
  188. Instruction* type;
  189. for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypePointer)].size(); ++t) {
  190. type = groupedTypes[enumCast(Op::OpTypePointer)][t];
  191. if (type->getImmediateOperand(0) == (unsigned)storageClass &&
  192. type->getIdOperand(1) == pointee)
  193. return type->getResultId();
  194. }
  195. type = new Instruction(forwardPointerType, NoType, Op::OpTypePointer);
  196. type->reserveOperands(2);
  197. type->addImmediateOperand(storageClass);
  198. type->addIdOperand(pointee);
  199. groupedTypes[enumCast(Op::OpTypePointer)].push_back(type);
  200. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  201. module.mapInstruction(type);
  202. // If we are emitting nonsemantic debuginfo, we need to patch the debug pointer type
  203. // that was emitted alongside the forward pointer, now that we have a pointee debug
  204. // type for it to point to.
  205. if (emitNonSemanticShaderDebugInfo) {
  206. Instruction *debugForwardPointer = module.getInstruction(getDebugType(forwardPointerType));
  207. assert(getDebugType(pointee));
  208. debugForwardPointer->setIdOperand(2, getDebugType(pointee));
  209. }
  210. return type->getResultId();
  211. }
  212. Id Builder::makeIntegerType(int width, bool hasSign)
  213. {
  214. // try to find it
  215. Instruction* type;
  216. for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeInt)].size(); ++t) {
  217. type = groupedTypes[enumCast(Op::OpTypeInt)][t];
  218. if (type->getImmediateOperand(0) == (unsigned)width &&
  219. type->getImmediateOperand(1) == (hasSign ? 1u : 0u))
  220. return type->getResultId();
  221. }
  222. // not found, make it
  223. type = new Instruction(getUniqueId(), NoType, Op::OpTypeInt);
  224. type->reserveOperands(2);
  225. type->addImmediateOperand(width);
  226. type->addImmediateOperand(hasSign ? 1 : 0);
  227. groupedTypes[enumCast(Op::OpTypeInt)].push_back(type);
  228. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  229. module.mapInstruction(type);
  230. // deal with capabilities
  231. switch (width) {
  232. case 8:
  233. case 16:
  234. // these are currently handled by storage-type declarations and post processing
  235. break;
  236. case 64:
  237. addCapability(Capability::Int64);
  238. break;
  239. default:
  240. break;
  241. }
  242. if (emitNonSemanticShaderDebugInfo)
  243. {
  244. auto const debugResultId = makeIntegerDebugType(width, hasSign);
  245. debugTypeIdLookup[type->getResultId()] = debugResultId;
  246. }
  247. return type->getResultId();
  248. }
  249. Id Builder::makeFloatType(int width)
  250. {
  251. // try to find it
  252. Instruction* type;
  253. for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeFloat)].size(); ++t) {
  254. type = groupedTypes[enumCast(Op::OpTypeFloat)][t];
  255. if (type->getNumOperands() != 1) {
  256. continue;
  257. }
  258. if (type->getImmediateOperand(0) == (unsigned)width)
  259. return type->getResultId();
  260. }
  261. // not found, make it
  262. type = new Instruction(getUniqueId(), NoType, Op::OpTypeFloat);
  263. type->addImmediateOperand(width);
  264. groupedTypes[enumCast(Op::OpTypeFloat)].push_back(type);
  265. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  266. module.mapInstruction(type);
  267. // deal with capabilities
  268. switch (width) {
  269. case 16:
  270. // currently handled by storage-type declarations and post processing
  271. break;
  272. case 64:
  273. addCapability(Capability::Float64);
  274. break;
  275. default:
  276. break;
  277. }
  278. if (emitNonSemanticShaderDebugInfo)
  279. {
  280. auto const debugResultId = makeFloatDebugType(width);
  281. debugTypeIdLookup[type->getResultId()] = debugResultId;
  282. }
  283. return type->getResultId();
  284. }
  285. Id Builder::makeBFloat16Type()
  286. {
  287. // try to find it
  288. Instruction* type;
  289. for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeFloat)].size(); ++t) {
  290. type = groupedTypes[enumCast(Op::OpTypeFloat)][t];
  291. if (type->getNumOperands() != 2) {
  292. continue;
  293. }
  294. if (type->getImmediateOperand(0) == (unsigned)16 &&
  295. type->getImmediateOperand(1) == FPEncoding::BFloat16KHR)
  296. return type->getResultId();
  297. }
  298. // not found, make it
  299. type = new Instruction(getUniqueId(), NoType, Op::OpTypeFloat);
  300. type->addImmediateOperand(16);
  301. type->addImmediateOperand(FPEncoding::BFloat16KHR);
  302. groupedTypes[enumCast(Op::OpTypeFloat)].push_back(type);
  303. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  304. module.mapInstruction(type);
  305. addExtension(spv::E_SPV_KHR_bfloat16);
  306. addCapability(Capability::BFloat16TypeKHR);
  307. #if 0
  308. // XXX not supported
  309. if (emitNonSemanticShaderDebugInfo)
  310. {
  311. auto const debugResultId = makeFloatDebugType(width);
  312. debugTypeIdLookup[type->getResultId()] = debugResultId;
  313. }
  314. #endif
  315. return type->getResultId();
  316. }
  317. Id Builder::makeFloatE5M2Type()
  318. {
  319. // try to find it
  320. Instruction* type;
  321. for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeFloat)].size(); ++t) {
  322. type = groupedTypes[enumCast(Op::OpTypeFloat)][t];
  323. if (type->getNumOperands() != 2) {
  324. continue;
  325. }
  326. if (type->getImmediateOperand(0) == (unsigned)8 &&
  327. type->getImmediateOperand(1) == FPEncoding::Float8E5M2EXT)
  328. return type->getResultId();
  329. }
  330. // not found, make it
  331. type = new Instruction(getUniqueId(), NoType, Op::OpTypeFloat);
  332. type->addImmediateOperand(8);
  333. type->addImmediateOperand(FPEncoding::Float8E5M2EXT);
  334. groupedTypes[enumCast(Op::OpTypeFloat)].push_back(type);
  335. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  336. module.mapInstruction(type);
  337. addExtension(spv::E_SPV_EXT_float8);
  338. addCapability(Capability::Float8EXT);
  339. #if 0
  340. // XXX not supported
  341. if (emitNonSemanticShaderDebugInfo)
  342. {
  343. auto const debugResultId = makeFloatDebugType(width);
  344. debugTypeIdLookup[type->getResultId()] = debugResultId;
  345. }
  346. #endif
  347. return type->getResultId();
  348. }
  349. Id Builder::makeFloatE4M3Type()
  350. {
  351. // try to find it
  352. Instruction* type;
  353. for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeFloat)].size(); ++t) {
  354. type = groupedTypes[enumCast(Op::OpTypeFloat)][t];
  355. if (type->getNumOperands() != 2) {
  356. continue;
  357. }
  358. if (type->getImmediateOperand(0) == (unsigned)8 &&
  359. type->getImmediateOperand(1) == FPEncoding::Float8E4M3EXT)
  360. return type->getResultId();
  361. }
  362. // not found, make it
  363. type = new Instruction(getUniqueId(), NoType, Op::OpTypeFloat);
  364. type->addImmediateOperand(8);
  365. type->addImmediateOperand(FPEncoding::Float8E4M3EXT);
  366. groupedTypes[enumCast(Op::OpTypeFloat)].push_back(type);
  367. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  368. module.mapInstruction(type);
  369. addExtension(spv::E_SPV_EXT_float8);
  370. addCapability(Capability::Float8EXT);
  371. #if 0
  372. // XXX not supported
  373. if (emitNonSemanticShaderDebugInfo)
  374. {
  375. auto const debugResultId = makeFloatDebugType(width);
  376. debugTypeIdLookup[type->getResultId()] = debugResultId;
  377. }
  378. #endif
  379. return type->getResultId();
  380. }
  381. // Make a struct without checking for duplication.
  382. // See makeStructResultType() for non-decorated structs
  383. // needed as the result of some instructions, which does
  384. // check for duplicates.
  385. // For compiler-generated structs, debug info is ignored.
  386. Id Builder::makeStructType(const std::vector<Id>& members, const std::vector<spv::StructMemberDebugInfo>& memberDebugInfo,
  387. const char* name, bool const compilerGenerated)
  388. {
  389. // Don't look for previous one, because in the general case,
  390. // structs can be duplicated except for decorations.
  391. // not found, make it
  392. Instruction* type = new Instruction(getUniqueId(), NoType, Op::OpTypeStruct);
  393. for (int op = 0; op < (int)members.size(); ++op)
  394. type->addIdOperand(members[op]);
  395. groupedTypes[enumCast(Op::OpTypeStruct)].push_back(type);
  396. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  397. module.mapInstruction(type);
  398. addName(type->getResultId(), name);
  399. if (emitNonSemanticShaderDebugInfo && !compilerGenerated) {
  400. assert(members.size() == memberDebugInfo.size());
  401. auto const debugResultId =
  402. makeCompositeDebugType(members, memberDebugInfo, name, NonSemanticShaderDebugInfo100Structure);
  403. debugTypeIdLookup[type->getResultId()] = debugResultId;
  404. }
  405. return type->getResultId();
  406. }
  407. // Make a struct for the simple results of several instructions,
  408. // checking for duplication.
  409. Id Builder::makeStructResultType(Id type0, Id type1)
  410. {
  411. // try to find it
  412. Instruction* type;
  413. for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeStruct)].size(); ++t) {
  414. type = groupedTypes[enumCast(Op::OpTypeStruct)][t];
  415. if (type->getNumOperands() != 2)
  416. continue;
  417. if (type->getIdOperand(0) != type0 ||
  418. type->getIdOperand(1) != type1)
  419. continue;
  420. return type->getResultId();
  421. }
  422. // not found, make it
  423. std::vector<spv::Id> members;
  424. members.push_back(type0);
  425. members.push_back(type1);
  426. return makeStructType(members, {}, "ResType");
  427. }
  428. Id Builder::makeVectorType(Id component, int size)
  429. {
  430. // try to find it
  431. Instruction* type;
  432. for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeVector)].size(); ++t) {
  433. type = groupedTypes[enumCast(Op::OpTypeVector)][t];
  434. if (type->getIdOperand(0) == component &&
  435. type->getImmediateOperand(1) == (unsigned)size)
  436. return type->getResultId();
  437. }
  438. // not found, make it
  439. type = new Instruction(getUniqueId(), NoType, Op::OpTypeVector);
  440. type->reserveOperands(2);
  441. type->addIdOperand(component);
  442. type->addImmediateOperand(size);
  443. groupedTypes[enumCast(Op::OpTypeVector)].push_back(type);
  444. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  445. module.mapInstruction(type);
  446. if (emitNonSemanticShaderDebugInfo)
  447. {
  448. auto const debugResultId = makeVectorDebugType(component, size);
  449. debugTypeIdLookup[type->getResultId()] = debugResultId;
  450. }
  451. return type->getResultId();
  452. }
  453. Id Builder::makeMatrixType(Id component, int cols, int rows)
  454. {
  455. assert(cols <= maxMatrixSize && rows <= maxMatrixSize);
  456. Id column = makeVectorType(component, rows);
  457. // try to find it
  458. Instruction* type;
  459. for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeMatrix)].size(); ++t) {
  460. type = groupedTypes[enumCast(Op::OpTypeMatrix)][t];
  461. if (type->getIdOperand(0) == column &&
  462. type->getImmediateOperand(1) == (unsigned)cols)
  463. return type->getResultId();
  464. }
  465. // not found, make it
  466. type = new Instruction(getUniqueId(), NoType, Op::OpTypeMatrix);
  467. type->reserveOperands(2);
  468. type->addIdOperand(column);
  469. type->addImmediateOperand(cols);
  470. groupedTypes[enumCast(Op::OpTypeMatrix)].push_back(type);
  471. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  472. module.mapInstruction(type);
  473. if (emitNonSemanticShaderDebugInfo)
  474. {
  475. auto const debugResultId = makeMatrixDebugType(column, cols);
  476. debugTypeIdLookup[type->getResultId()] = debugResultId;
  477. }
  478. return type->getResultId();
  479. }
// Find or create an OpTypeCooperativeMatrixKHR with the given component type
// and shape operands ('scope', 'rows', 'cols', 'use' are all <id>s).
// When nonsemantic debug info is enabled, a human-readable "coopmat<...>"
// opaque debug type is synthesized, since NonSemanticShaderDebugInfo100 has
// no dedicated cooperative-matrix encoding.
Id Builder::makeCooperativeMatrixTypeKHR(Id component, Id scope, Id rows, Id cols, Id use)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeCooperativeMatrixKHR)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeCooperativeMatrixKHR)][t];
        if (type->getIdOperand(0) == component &&
            type->getIdOperand(1) == scope &&
            type->getIdOperand(2) == rows &&
            type->getIdOperand(3) == cols &&
            type->getIdOperand(4) == use)
            return type->getResultId();
    }
    // not found, make it
    type = new Instruction(getUniqueId(), NoType, Op::OpTypeCooperativeMatrixKHR);
    type->reserveOperands(5);
    type->addIdOperand(component);
    type->addIdOperand(scope);
    type->addIdOperand(rows);
    type->addIdOperand(cols);
    type->addIdOperand(use);
    groupedTypes[enumCast(Op::OpTypeCooperativeMatrixKHR)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    if (emitNonSemanticShaderDebugInfo)
    {
        // Find a name for one of the parameters. It can either come from debuginfo for another
        // type, or an OpName from a constant.
        auto const findName = [&](Id id) {
            // Prefer the name string of the DebugTypeBasic associated with 'id'.
            Id id2 = getDebugType(id);
            for (auto &t : groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic]) {
                if (t->getResultId() == id2) {
                    // Operand 2 of a DebugTypeBasic OpExtInst is its name string id.
                    for (auto &s : strings) {
                        if (s->getResultId() == t->getIdOperand(2)) {
                            return s->getNameString();
                        }
                    }
                }
            }
            // Fall back to an OpName that targets 'id' directly.
            for (auto &t : names) {
                if (t->getIdOperand(0) == id) {
                    return t->getNameString();
                }
            }
            return "unknown";
        };
        std::string debugName = "coopmat<";
        debugName += std::string(findName(component)) + ", ";
        if (isConstantScalar(scope)) {
            // A known scope constant is spelled as its GLSL gl_Scope* name.
            debugName += std::string("gl_Scope") + std::string(spv::ScopeToString((spv::Scope)getConstantScalar(scope))) + ", ";
        } else {
            debugName += std::string(findName(scope)) + ", ";
        }
        debugName += std::string(findName(rows)) + ", ";
        debugName += std::string(findName(cols)) + ">";
        // There's no nonsemantic debug info instruction for cooperative matrix types,
        // use opaque composite instead.
        auto const debugResultId = makeOpaqueDebugType(debugName.c_str());
        debugTypeIdLookup[type->getResultId()] = debugResultId;
    }
    return type->getResultId();
}
  542. Id Builder::makeCooperativeMatrixTypeNV(Id component, Id scope, Id rows, Id cols)
  543. {
  544. // try to find it
  545. Instruction* type;
  546. for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeCooperativeMatrixNV)].size(); ++t) {
  547. type = groupedTypes[enumCast(Op::OpTypeCooperativeMatrixNV)][t];
  548. if (type->getIdOperand(0) == component && type->getIdOperand(1) == scope && type->getIdOperand(2) == rows &&
  549. type->getIdOperand(3) == cols)
  550. return type->getResultId();
  551. }
  552. // not found, make it
  553. type = new Instruction(getUniqueId(), NoType, Op::OpTypeCooperativeMatrixNV);
  554. type->reserveOperands(4);
  555. type->addIdOperand(component);
  556. type->addIdOperand(scope);
  557. type->addIdOperand(rows);
  558. type->addIdOperand(cols);
  559. groupedTypes[enumCast(Op::OpTypeCooperativeMatrixNV)].push_back(type);
  560. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  561. module.mapInstruction(type);
  562. return type->getResultId();
  563. }
  564. Id Builder::makeCooperativeMatrixTypeWithSameShape(Id component, Id otherType)
  565. {
  566. Instruction* instr = module.getInstruction(otherType);
  567. if (instr->getOpCode() == Op::OpTypeCooperativeMatrixNV) {
  568. return makeCooperativeMatrixTypeNV(component, instr->getIdOperand(1), instr->getIdOperand(2), instr->getIdOperand(3));
  569. } else {
  570. assert(instr->getOpCode() == Op::OpTypeCooperativeMatrixKHR);
  571. return makeCooperativeMatrixTypeKHR(component, instr->getIdOperand(1), instr->getIdOperand(2), instr->getIdOperand(3), instr->getIdOperand(4));
  572. }
  573. }
  574. Id Builder::makeCooperativeVectorTypeNV(Id componentType, Id components)
  575. {
  576. // try to find it
  577. Instruction* type;
  578. for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeCooperativeVectorNV)].size(); ++t) {
  579. type = groupedTypes[enumCast(Op::OpTypeCooperativeVectorNV)][t];
  580. if (type->getIdOperand(0) == componentType &&
  581. type->getIdOperand(1) == components)
  582. return type->getResultId();
  583. }
  584. // not found, make it
  585. type = new Instruction(getUniqueId(), NoType, Op::OpTypeCooperativeVectorNV);
  586. type->addIdOperand(componentType);
  587. type->addIdOperand(components);
  588. groupedTypes[enumCast(Op::OpTypeCooperativeVectorNV)].push_back(type);
  589. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  590. module.mapInstruction(type);
  591. return type->getResultId();
  592. }
  593. Id Builder::makeTensorTypeARM(Id elementType, Id rank)
  594. {
  595. // See if an OpTypeTensorARM with same element type and rank already exists.
  596. for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeTensorARM)].size(); ++t) {
  597. const Instruction *type = groupedTypes[enumCast(Op::OpTypeTensorARM)][t];
  598. if (type->getIdOperand(0) == elementType && type->getIdOperand(1) == rank)
  599. return type->getResultId();
  600. }
  601. // Not found, make it.
  602. std::unique_ptr<Instruction> type(new Instruction(getUniqueId(), NoType, Op::OpTypeTensorARM));
  603. type->addIdOperand(elementType);
  604. type->addIdOperand(rank);
  605. groupedTypes[enumCast(Op::OpTypeTensorARM)].push_back(type.get());
  606. module.mapInstruction(type.get());
  607. Id resultID = type->getResultId();
  608. constantsTypesGlobals.push_back(std::move(type));
  609. return resultID;
  610. }
  611. Id Builder::makeGenericType(spv::Op opcode, std::vector<spv::IdImmediate>& operands)
  612. {
  613. // try to find it
  614. Instruction* type;
  615. for (int t = 0; t < (int)groupedTypes[enumCast(opcode)].size(); ++t) {
  616. type = groupedTypes[enumCast(opcode)][t];
  617. if (static_cast<size_t>(type->getNumOperands()) != operands.size())
  618. continue; // Number mismatch, find next
  619. bool match = true;
  620. for (int op = 0; match && op < (int)operands.size(); ++op) {
  621. match = (operands[op].isId ? type->getIdOperand(op) : type->getImmediateOperand(op)) == operands[op].word;
  622. }
  623. if (match)
  624. return type->getResultId();
  625. }
  626. // not found, make it
  627. type = new Instruction(getUniqueId(), NoType, opcode);
  628. type->reserveOperands(operands.size());
  629. for (size_t op = 0; op < operands.size(); ++op) {
  630. if (operands[op].isId)
  631. type->addIdOperand(operands[op].word);
  632. else
  633. type->addImmediateOperand(operands[op].word);
  634. }
  635. groupedTypes[enumCast(opcode)].push_back(type);
  636. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  637. module.mapInstruction(type);
  638. return type->getResultId();
  639. }
  640. // TODO: performance: track arrays per stride
  641. // If a stride is supplied (non-zero) make an array.
  642. // If no stride (0), reuse previous array types.
  643. // 'size' is an Id of a constant or specialization constant of the array size
  644. Id Builder::makeArrayType(Id element, Id sizeId, int stride)
  645. {
  646. Instruction* type;
  647. if (stride == 0) {
  648. // try to find existing type
  649. for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeArray)].size(); ++t) {
  650. type = groupedTypes[enumCast(Op::OpTypeArray)][t];
  651. if (type->getIdOperand(0) == element &&
  652. type->getIdOperand(1) == sizeId &&
  653. explicitlyLaidOut.find(type->getResultId()) == explicitlyLaidOut.end())
  654. return type->getResultId();
  655. }
  656. }
  657. // not found, make it
  658. type = new Instruction(getUniqueId(), NoType, Op::OpTypeArray);
  659. type->reserveOperands(2);
  660. type->addIdOperand(element);
  661. type->addIdOperand(sizeId);
  662. groupedTypes[enumCast(Op::OpTypeArray)].push_back(type);
  663. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  664. module.mapInstruction(type);
  665. if (stride != 0) {
  666. explicitlyLaidOut.insert(type->getResultId());
  667. }
  668. if (emitNonSemanticShaderDebugInfo)
  669. {
  670. auto const debugResultId = makeArrayDebugType(element, sizeId);
  671. debugTypeIdLookup[type->getResultId()] = debugResultId;
  672. }
  673. return type->getResultId();
  674. }
  675. Id Builder::makeRuntimeArray(Id element)
  676. {
  677. Instruction* type = new Instruction(getUniqueId(), NoType, Op::OpTypeRuntimeArray);
  678. type->addIdOperand(element);
  679. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  680. module.mapInstruction(type);
  681. if (emitNonSemanticShaderDebugInfo)
  682. {
  683. auto const debugResultId = makeArrayDebugType(element, makeUintConstant(0));
  684. debugTypeIdLookup[type->getResultId()] = debugResultId;
  685. }
  686. return type->getResultId();
  687. }
// Find or create an OpTypeFunction with the given return and parameter types.
// Also creates (possibly lazily, see the HLSL note below) the matching
// DebugTypeFunction when nonsemantic debug info is enabled.
Id Builder::makeFunctionType(Id returnType, const std::vector<Id>& paramTypes)
{
    // try to find it
    Instruction* type;
    for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeFunction)].size(); ++t) {
        type = groupedTypes[enumCast(Op::OpTypeFunction)][t];
        // Operand 0 is the return type; operands 1..n are the parameter types.
        if (type->getIdOperand(0) != returnType || (int)paramTypes.size() != type->getNumOperands() - 1)
            continue;
        bool mismatch = false;
        for (int p = 0; p < (int)paramTypes.size(); ++p) {
            if (paramTypes[p] != type->getIdOperand(p + 1)) {
                mismatch = true;
                break;
            }
        }
        if (! mismatch)
        {
            // If compiling HLSL, glslang will create a wrapper function around the entrypoint. Accordingly, a void(void)
            // function type is created for the wrapper function. However, nonsemantic shader debug information is disabled
            // while creating the HLSL wrapper. Consequently, if we encounter another void(void) function, we need to create
            // the associated debug function type if it hasn't been created yet.
            if(emitNonSemanticShaderDebugInfo && getDebugType(type->getResultId()) == NoType) {
                assert(sourceLang == spv::SourceLanguage::HLSL);
                assert(getTypeClass(returnType) == Op::OpTypeVoid && paramTypes.size() == 0);
                Id id = makeDebugFunctionType(returnType, {});
                debugTypeIdLookup[type->getResultId()] = id;
            }
            return type->getResultId();
        }
    }
    // not found, make it
    Id typeId = getUniqueId();
    type = new Instruction(typeId, NoType, Op::OpTypeFunction);
    type->reserveOperands(paramTypes.size() + 1);
    type->addIdOperand(returnType);
    for (int p = 0; p < (int)paramTypes.size(); ++p)
        type->addIdOperand(paramTypes[p]);
    groupedTypes[enumCast(Op::OpTypeFunction)].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    // make debug type and map it
    if (emitNonSemanticShaderDebugInfo) {
        Id debugTypeId = makeDebugFunctionType(returnType, paramTypes);
        debugTypeIdLookup[typeId] = debugTypeId;
    }
    return type->getResultId();
}
  735. Id Builder::makeDebugFunctionType(Id returnType, const std::vector<Id>& paramTypes)
  736. {
  737. assert(getDebugType(returnType) != NoType);
  738. Id typeId = getUniqueId();
  739. auto type = new Instruction(typeId, makeVoidType(), Op::OpExtInst);
  740. type->reserveOperands(paramTypes.size() + 4);
  741. type->addIdOperand(nonSemanticShaderDebugInfo);
  742. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeFunction);
  743. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsPublic));
  744. type->addIdOperand(getDebugType(returnType));
  745. for (auto const paramType : paramTypes) {
  746. if (isPointerType(paramType) || isArrayType(paramType)) {
  747. type->addIdOperand(getDebugType(getContainedTypeId(paramType)));
  748. }
  749. else {
  750. type->addIdOperand(getDebugType(paramType));
  751. }
  752. }
  753. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  754. module.mapInstruction(type);
  755. return typeId;
  756. }
  757. Id Builder::makeImageType(Id sampledType, Dim dim, bool depth, bool arrayed, bool ms, unsigned sampled,
  758. ImageFormat format, const char* debugName)
  759. {
  760. assert(sampled == 1 || sampled == 2);
  761. // try to find it
  762. Instruction* type;
  763. for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeImage)].size(); ++t) {
  764. type = groupedTypes[enumCast(Op::OpTypeImage)][t];
  765. if (type->getIdOperand(0) == sampledType &&
  766. type->getImmediateOperand(1) == (unsigned int)dim &&
  767. type->getImmediateOperand(2) == ( depth ? 1u : 0u) &&
  768. type->getImmediateOperand(3) == (arrayed ? 1u : 0u) &&
  769. type->getImmediateOperand(4) == ( ms ? 1u : 0u) &&
  770. type->getImmediateOperand(5) == sampled &&
  771. type->getImmediateOperand(6) == (unsigned int)format)
  772. return type->getResultId();
  773. }
  774. // not found, make it
  775. type = new Instruction(getUniqueId(), NoType, Op::OpTypeImage);
  776. type->reserveOperands(7);
  777. type->addIdOperand(sampledType);
  778. type->addImmediateOperand( dim);
  779. type->addImmediateOperand( depth ? 1 : 0);
  780. type->addImmediateOperand(arrayed ? 1 : 0);
  781. type->addImmediateOperand( ms ? 1 : 0);
  782. type->addImmediateOperand(sampled);
  783. type->addImmediateOperand((unsigned int)format);
  784. groupedTypes[enumCast(Op::OpTypeImage)].push_back(type);
  785. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  786. module.mapInstruction(type);
  787. // deal with capabilities
  788. switch (dim) {
  789. case Dim::Buffer:
  790. if (sampled == 1)
  791. addCapability(Capability::SampledBuffer);
  792. else
  793. addCapability(Capability::ImageBuffer);
  794. break;
  795. case Dim::Dim1D:
  796. if (sampled == 1)
  797. addCapability(Capability::Sampled1D);
  798. else
  799. addCapability(Capability::Image1D);
  800. break;
  801. case Dim::Cube:
  802. if (arrayed) {
  803. if (sampled == 1)
  804. addCapability(Capability::SampledCubeArray);
  805. else
  806. addCapability(Capability::ImageCubeArray);
  807. }
  808. break;
  809. case Dim::Rect:
  810. if (sampled == 1)
  811. addCapability(Capability::SampledRect);
  812. else
  813. addCapability(Capability::ImageRect);
  814. break;
  815. case Dim::SubpassData:
  816. addCapability(Capability::InputAttachment);
  817. break;
  818. default:
  819. break;
  820. }
  821. if (ms) {
  822. if (sampled == 2) {
  823. // Images used with subpass data are not storage
  824. // images, so don't require the capability for them.
  825. if (dim != Dim::SubpassData)
  826. addCapability(Capability::StorageImageMultisample);
  827. if (arrayed)
  828. addCapability(Capability::ImageMSArray);
  829. }
  830. }
  831. if (emitNonSemanticShaderDebugInfo)
  832. {
  833. auto const debugResultId = makeOpaqueDebugType(debugName);
  834. debugTypeIdLookup[type->getResultId()] = debugResultId;
  835. }
  836. return type->getResultId();
  837. }
  838. Id Builder::makeSampledImageType(Id imageType, const char* debugName)
  839. {
  840. // try to find it
  841. Instruction* type;
  842. for (int t = 0; t < (int)groupedTypes[enumCast(Op::OpTypeSampledImage)].size(); ++t) {
  843. type = groupedTypes[enumCast(Op::OpTypeSampledImage)][t];
  844. if (type->getIdOperand(0) == imageType)
  845. return type->getResultId();
  846. }
  847. // not found, make it
  848. type = new Instruction(getUniqueId(), NoType, Op::OpTypeSampledImage);
  849. type->addIdOperand(imageType);
  850. groupedTypes[enumCast(Op::OpTypeSampledImage)].push_back(type);
  851. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  852. module.mapInstruction(type);
  853. if (emitNonSemanticShaderDebugInfo)
  854. {
  855. auto const debugResultId = makeOpaqueDebugType(debugName);
  856. debugTypeIdLookup[type->getResultId()] = debugResultId;
  857. }
  858. return type->getResultId();
  859. }
  860. Id Builder::makeDebugInfoNone()
  861. {
  862. if (debugInfoNone != 0)
  863. return debugInfoNone;
  864. Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  865. inst->reserveOperands(2);
  866. inst->addIdOperand(nonSemanticShaderDebugInfo);
  867. inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugInfoNone);
  868. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
  869. module.mapInstruction(inst);
  870. debugInfoNone = inst->getResultId();
  871. return debugInfoNone;
  872. }
  873. Id Builder::makeBoolDebugType(int const size)
  874. {
  875. // try to find it
  876. Instruction* type;
  877. for (int t = 0; t < (int)groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].size(); ++t) {
  878. type = groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic][t];
  879. if (type->getIdOperand(0) == getStringId("bool") &&
  880. type->getIdOperand(1) == static_cast<unsigned int>(size) &&
  881. type->getIdOperand(2) == NonSemanticShaderDebugInfo100Boolean)
  882. return type->getResultId();
  883. }
  884. type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  885. type->reserveOperands(6);
  886. type->addIdOperand(nonSemanticShaderDebugInfo);
  887. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeBasic);
  888. type->addIdOperand(getStringId("bool")); // name id
  889. type->addIdOperand(makeUintConstant(size)); // size id
  890. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100Boolean)); // encoding id
  891. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100None)); // flags id
  892. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].push_back(type);
  893. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  894. module.mapInstruction(type);
  895. return type->getResultId();
  896. }
  897. Id Builder::makeIntegerDebugType(int const width, bool const hasSign)
  898. {
  899. const char* typeName = nullptr;
  900. switch (width) {
  901. case 8: typeName = hasSign ? "int8_t" : "uint8_t"; break;
  902. case 16: typeName = hasSign ? "int16_t" : "uint16_t"; break;
  903. case 64: typeName = hasSign ? "int64_t" : "uint64_t"; break;
  904. default: typeName = hasSign ? "int" : "uint";
  905. }
  906. auto nameId = getStringId(typeName);
  907. // try to find it
  908. Instruction* type;
  909. for (int t = 0; t < (int)groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].size(); ++t) {
  910. type = groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic][t];
  911. if (type->getIdOperand(0) == nameId &&
  912. type->getIdOperand(1) == static_cast<unsigned int>(width) &&
  913. type->getIdOperand(2) == (hasSign ? NonSemanticShaderDebugInfo100Signed : NonSemanticShaderDebugInfo100Unsigned))
  914. return type->getResultId();
  915. }
  916. // not found, make it
  917. type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  918. type->reserveOperands(6);
  919. type->addIdOperand(nonSemanticShaderDebugInfo);
  920. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeBasic);
  921. type->addIdOperand(nameId); // name id
  922. type->addIdOperand(makeUintConstant(width)); // size id
  923. if(hasSign == true) {
  924. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100Signed)); // encoding id
  925. } else {
  926. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100Unsigned)); // encoding id
  927. }
  928. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100None)); // flags id
  929. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].push_back(type);
  930. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  931. module.mapInstruction(type);
  932. return type->getResultId();
  933. }
  934. Id Builder::makeFloatDebugType(int const width)
  935. {
  936. const char* typeName = nullptr;
  937. switch (width) {
  938. case 16: typeName = "float16_t"; break;
  939. case 64: typeName = "double"; break;
  940. default: typeName = "float"; break;
  941. }
  942. auto nameId = getStringId(typeName);
  943. // try to find it
  944. Instruction* type;
  945. for (int t = 0; t < (int)groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].size(); ++t) {
  946. type = groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic][t];
  947. if (type->getIdOperand(0) == nameId &&
  948. type->getIdOperand(1) == static_cast<unsigned int>(width) &&
  949. type->getIdOperand(2) == NonSemanticShaderDebugInfo100Float)
  950. return type->getResultId();
  951. }
  952. // not found, make it
  953. type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  954. type->reserveOperands(6);
  955. type->addIdOperand(nonSemanticShaderDebugInfo);
  956. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeBasic);
  957. type->addIdOperand(nameId); // name id
  958. type->addIdOperand(makeUintConstant(width)); // size id
  959. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100Float)); // encoding id
  960. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100None)); // flags id
  961. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeBasic].push_back(type);
  962. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  963. module.mapInstruction(type);
  964. return type->getResultId();
  965. }
  966. Id Builder::makeSequentialDebugType(Id const baseType, Id const componentCount, NonSemanticShaderDebugInfo100Instructions const sequenceType)
  967. {
  968. assert(sequenceType == NonSemanticShaderDebugInfo100DebugTypeArray ||
  969. sequenceType == NonSemanticShaderDebugInfo100DebugTypeVector);
  970. // try to find it
  971. Instruction* type;
  972. for (int t = 0; t < (int)groupedDebugTypes[sequenceType].size(); ++t) {
  973. type = groupedDebugTypes[sequenceType][t];
  974. if (type->getIdOperand(0) == baseType &&
  975. type->getIdOperand(1) == makeUintConstant(componentCount))
  976. return type->getResultId();
  977. }
  978. // not found, make it
  979. type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  980. type->reserveOperands(4);
  981. type->addIdOperand(nonSemanticShaderDebugInfo);
  982. type->addImmediateOperand(sequenceType);
  983. type->addIdOperand(getDebugType(baseType)); // base type
  984. type->addIdOperand(componentCount); // component count
  985. groupedDebugTypes[sequenceType].push_back(type);
  986. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  987. module.mapInstruction(type);
  988. return type->getResultId();
  989. }
// Emit (or reuse) a DebugTypeArray over 'baseType'. 'componentCount' is the
// <id> of a uint constant (0 is used by makeRuntimeArray for runtime arrays).
Id Builder::makeArrayDebugType(Id const baseType, Id const componentCount)
{
    return makeSequentialDebugType(baseType, componentCount, NonSemanticShaderDebugInfo100DebugTypeArray);
}
// Emit (or reuse) a DebugTypeVector over 'baseType'. The literal
// 'componentCount' is wrapped into a uint constant id as the debug-info set
// requires all operands to be <id>s.
Id Builder::makeVectorDebugType(Id const baseType, int const componentCount)
{
    return makeSequentialDebugType(baseType, makeUintConstant(componentCount), NonSemanticShaderDebugInfo100DebugTypeVector);
}
  998. Id Builder::makeMatrixDebugType(Id const vectorType, int const vectorCount, bool columnMajor)
  999. {
  1000. // try to find it
  1001. Instruction* type;
  1002. for (int t = 0; t < (int)groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeMatrix].size(); ++t) {
  1003. type = groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeMatrix][t];
  1004. if (type->getIdOperand(0) == vectorType &&
  1005. type->getIdOperand(1) == makeUintConstant(vectorCount))
  1006. return type->getResultId();
  1007. }
  1008. // not found, make it
  1009. type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1010. type->reserveOperands(5);
  1011. type->addIdOperand(nonSemanticShaderDebugInfo);
  1012. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeMatrix);
  1013. type->addIdOperand(getDebugType(vectorType)); // vector type id
  1014. type->addIdOperand(makeUintConstant(vectorCount)); // component count id
  1015. type->addIdOperand(makeBoolConstant(columnMajor)); // column-major id
  1016. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeMatrix].push_back(type);
  1017. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1018. module.mapInstruction(type);
  1019. return type->getResultId();
  1020. }
// Build the DebugTypeMember instruction describing one struct member.
// 'debugTypeLoc' supplies the member's source name, line, column, and an
// optional pre-built debug type id that overrides the one derived from
// 'memberType'.
Id Builder::makeMemberDebugType(Id const memberType, StructMemberDebugInfo const& debugTypeLoc)
{
    // The member's base debug type must already have been created.
    assert(getDebugType(memberType) != NoType);
    Instruction* type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
    type->reserveOperands(10);
    type->addIdOperand(nonSemanticShaderDebugInfo);
    type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeMember);
    type->addIdOperand(getStringId(debugTypeLoc.name)); // name id
    // Prefer an explicit per-member override (when supplied) over the debug
    // type derived from the SPIR-V member type.
    type->addIdOperand(debugTypeLoc.debugTypeOverride != 0 ? debugTypeLoc.debugTypeOverride
                                                           : getDebugType(memberType)); // type id
    type->addIdOperand(makeDebugSource(currentFileId)); // source id
    type->addIdOperand(makeUintConstant(debugTypeLoc.line)); // line id TODO: currentLine is always zero
    type->addIdOperand(makeUintConstant(debugTypeLoc.column)); // TODO: column id
    type->addIdOperand(makeUintConstant(0)); // TODO: offset id
    type->addIdOperand(makeUintConstant(0)); // TODO: size id
    type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsPublic)); // flags id
    groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeMember].push_back(type);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
    module.mapInstruction(type);
    return type->getResultId();
}
  1042. Id Builder::makeCompositeDebugType(std::vector<Id> const& memberTypes, std::vector<StructMemberDebugInfo> const& memberDebugInfo,
  1043. char const* const name, NonSemanticShaderDebugInfo100DebugCompositeType const tag)
  1044. {
  1045. // Create the debug member types.
  1046. std::vector<Id> memberDebugTypes;
  1047. assert(memberTypes.size() == memberDebugInfo.size());
  1048. for (size_t i = 0; i < memberTypes.size(); i++) {
  1049. if (getDebugType(memberTypes[i]) != NoType) {
  1050. memberDebugTypes.emplace_back(makeMemberDebugType(memberTypes[i], memberDebugInfo[i]));
  1051. }
  1052. }
  1053. // Create The structure debug type.
  1054. Instruction* type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1055. type->reserveOperands(memberDebugTypes.size() + 11);
  1056. type->addIdOperand(nonSemanticShaderDebugInfo);
  1057. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeComposite);
  1058. type->addIdOperand(getStringId(name)); // name id
  1059. type->addIdOperand(makeUintConstant(tag)); // tag id
  1060. type->addIdOperand(makeDebugSource(currentFileId)); // source id
  1061. type->addIdOperand(makeUintConstant(currentLine)); // line id TODO: currentLine always zero?
  1062. type->addIdOperand(makeUintConstant(0)); // TODO: column id
  1063. type->addIdOperand(makeDebugCompilationUnit()); // scope id
  1064. type->addIdOperand(getStringId(name)); // linkage name id
  1065. type->addIdOperand(makeUintConstant(0)); // TODO: size id
  1066. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsPublic)); // flags id
  1067. for(auto const memberDebugType : memberDebugTypes) {
  1068. type->addIdOperand(memberDebugType);
  1069. }
  1070. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeComposite].push_back(type);
  1071. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1072. module.mapInstruction(type);
  1073. return type->getResultId();
  1074. }
  1075. // The NonSemantic Shader Debug Info doesn't really have a dedicated opcode for opaque types. Instead, we use DebugTypeComposite.
  1076. // To represent a source language opaque type, this instruction must have no Members operands, Size operand must be
  1077. // DebugInfoNone, and Name must start with @ to avoid clashes with user defined names.
  1078. Id Builder::makeOpaqueDebugType(char const* const name)
  1079. {
  1080. // Create The structure debug type.
  1081. Instruction* type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1082. type->reserveOperands(11);
  1083. type->addIdOperand(nonSemanticShaderDebugInfo);
  1084. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypeComposite);
  1085. type->addIdOperand(getStringId(name)); // name id
  1086. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100Structure)); // tag id
  1087. type->addIdOperand(makeDebugSource(currentFileId)); // source id
  1088. type->addIdOperand(makeUintConstant(currentLine)); // line id TODO: currentLine always zero?
  1089. type->addIdOperand(makeUintConstant(0)); // TODO: column id
  1090. type->addIdOperand(makeDebugCompilationUnit()); // scope id
  1091. // Prepend '@' to opaque types.
  1092. type->addIdOperand(getStringId('@' + std::string(name))); // linkage name id
  1093. type->addIdOperand(makeDebugInfoNone()); // size id
  1094. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsPublic)); // flags id
  1095. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypeComposite].push_back(type);
  1096. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1097. module.mapInstruction(type);
  1098. return type->getResultId();
  1099. }
  1100. Id Builder::makePointerDebugType(StorageClass storageClass, Id const baseType)
  1101. {
  1102. const Id debugBaseType = getDebugType(baseType);
  1103. if (!debugBaseType) {
  1104. return makeDebugInfoNone();
  1105. }
  1106. const Id scID = makeUintConstant(storageClass);
  1107. for (Instruction* otherType : groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypePointer]) {
  1108. if (otherType->getIdOperand(2) == debugBaseType &&
  1109. otherType->getIdOperand(3) == scID) {
  1110. return otherType->getResultId();
  1111. }
  1112. }
  1113. Instruction* type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1114. type->reserveOperands(5);
  1115. type->addIdOperand(nonSemanticShaderDebugInfo);
  1116. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypePointer);
  1117. type->addIdOperand(debugBaseType);
  1118. type->addIdOperand(scID);
  1119. type->addIdOperand(makeUintConstant(0));
  1120. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypePointer].push_back(type);
  1121. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1122. module.mapInstruction(type);
  1123. return type->getResultId();
  1124. }
  1125. // Emit a OpExtInstWithForwardRefsKHR nonsemantic instruction for a pointer debug type
  1126. // where we don't have the pointee yet. Since we don't have the pointee yet, it just
  1127. // points to itself and we rely on patching it later.
  1128. Id Builder::makeForwardPointerDebugType(StorageClass storageClass)
  1129. {
  1130. const Id scID = makeUintConstant(storageClass);
  1131. this->addExtension(spv::E_SPV_KHR_relaxed_extended_instruction);
  1132. Instruction *type = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInstWithForwardRefsKHR);
  1133. type->addIdOperand(nonSemanticShaderDebugInfo);
  1134. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugTypePointer);
  1135. type->addIdOperand(type->getResultId());
  1136. type->addIdOperand(scID);
  1137. type->addIdOperand(makeUintConstant(0));
  1138. groupedDebugTypes[NonSemanticShaderDebugInfo100DebugTypePointer].push_back(type);
  1139. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1140. module.mapInstruction(type);
  1141. return type->getResultId();
  1142. }
// Return (creating and caching on first use) the DebugSource instruction for
// 'fileName' (an OpString id). When source-text emission is enabled, the text
// is attached to the DebugSource, split across DebugSourceContinued
// instructions when it exceeds the SPIR-V per-instruction word limit.
Id Builder::makeDebugSource(const Id fileName) {
    // One DebugSource per file: return the cached id when present.
    if (debugSourceId.find(fileName) != debugSourceId.end())
        return debugSourceId[fileName];
    spv::Id resultId = getUniqueId();
    Instruction* sourceInst = new Instruction(resultId, makeVoidType(), Op::OpExtInst);
    sourceInst->reserveOperands(3);
    sourceInst->addIdOperand(nonSemanticShaderDebugInfo);
    sourceInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugSource);
    sourceInst->addIdOperand(fileName);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(sourceInst));
    module.mapInstruction(sourceInst);
    if (emitNonSemanticShaderDebugSource) {
        // An instruction holds at most 0xFFFF words; after the instruction's
        // own overhead that leaves this many non-NUL source bytes per string.
        const int maxWordCount = 0xFFFF;
        const int opSourceWordCount = 4;
        const int nonNullBytesPerInstruction = 4 * (maxWordCount - opSourceWordCount) - 1;
        // Attach 'source' to sourceInst, chunked to fit the word limit.
        auto processDebugSource = [&](std::string source) {
            if (source.size() > 0) {
                int nextByte = 0;
                while ((int)source.size() - nextByte > 0) {
                    auto subString = source.substr(nextByte, nonNullBytesPerInstruction);
                    auto sourceId = getStringId(subString);
                    if (nextByte == 0) {
                        // First chunk rides on the DebugSource itself.
                        sourceInst->addIdOperand(sourceId);
                    } else {
                        // Subsequent chunks each get a DebugSourceContinued.
                        Instruction* sourceContinuedInst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
                        sourceContinuedInst->reserveOperands(2);
                        sourceContinuedInst->addIdOperand(nonSemanticShaderDebugInfo);
                        sourceContinuedInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugSourceContinued);
                        sourceContinuedInst->addIdOperand(sourceId);
                        constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(sourceContinuedInst));
                        module.mapInstruction(sourceContinuedInst);
                    }
                    nextByte += nonNullBytesPerInstruction;
                }
            } else {
                // Empty source still gets an (empty) text operand.
                auto sourceId = getStringId(source);
                sourceInst->addIdOperand(sourceId);
            }
        };
        if (fileName == mainFileId) {
            processDebugSource(sourceText);
        } else {
            auto incItr = includeFiles.find(fileName);
            if (incItr != includeFiles.end()) {
                processDebugSource(*incItr->second);
            } else {
                // We omit the optional source text item if not available in glslang
            }
        }
    }
    debugSourceId[fileName] = resultId;
    return resultId;
}
// Return the module's single DebugCompilationUnit, creating it on first use.
// Creation also seeds the debug-scope stack with the compilation unit as the
// root scope.
Id Builder::makeDebugCompilationUnit() {
    // Cached after the first call.
    if (nonSemanticShaderCompilationUnitId != 0)
        return nonSemanticShaderCompilationUnitId;
    spv::Id resultId = getUniqueId();
    Instruction* sourceInst = new Instruction(resultId, makeVoidType(), Op::OpExtInst);
    sourceInst->reserveOperands(6);
    sourceInst->addIdOperand(nonSemanticShaderDebugInfo);
    sourceInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugCompilationUnit);
    sourceInst->addIdOperand(makeUintConstant(1)); // version operand -- TODO(greg-lunarg): Get rid of magic number
    sourceInst->addIdOperand(makeUintConstant(4)); // DWARF-version operand -- TODO(greg-lunarg): Get rid of magic number
    sourceInst->addIdOperand(makeDebugSource(mainFileId)); // source id
    sourceInst->addIdOperand(makeUintConstant(sourceLang)); // source language id
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(sourceInst));
    module.mapInstruction(sourceInst);
    nonSemanticShaderCompilationUnitId = resultId;
    // We can reasonably assume that makeDebugCompilationUnit will be called before any of
    // debug-scope stack. Function scopes and lexical scopes will occur afterward.
    assert(currentDebugScopeId.empty());
    currentDebugScopeId.push(nonSemanticShaderCompilationUnitId);
    return resultId;
}
  1219. Id Builder::createDebugGlobalVariable(Id const type, char const*const name, Id const variable)
  1220. {
  1221. assert(type != 0);
  1222. Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1223. inst->reserveOperands(11);
  1224. inst->addIdOperand(nonSemanticShaderDebugInfo);
  1225. inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugGlobalVariable);
  1226. inst->addIdOperand(getStringId(name)); // name id
  1227. inst->addIdOperand(type); // type id
  1228. inst->addIdOperand(makeDebugSource(currentFileId)); // source id
  1229. inst->addIdOperand(makeUintConstant(currentLine)); // line id TODO: currentLine always zero?
  1230. inst->addIdOperand(makeUintConstant(0)); // TODO: column id
  1231. inst->addIdOperand(makeDebugCompilationUnit()); // scope id
  1232. inst->addIdOperand(getStringId(name)); // linkage name id
  1233. inst->addIdOperand(variable); // variable id
  1234. inst->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsDefinition)); // flags id
  1235. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
  1236. module.mapInstruction(inst);
  1237. return inst->getResultId();
  1238. }
// Create a DebugLocalVariable named 'name' of debug type 'type' within the
// current debug scope. 'argNumber' is the 1-based parameter position when the
// variable is a function parameter; 0 means "not a parameter", in which case
// the optional ArgNumber operand is omitted.
Id Builder::createDebugLocalVariable(Id type, char const*const name, size_t const argNumber)
{
    assert(name != nullptr);
    // A debug scope (compilation unit / function / lexical block) must be active.
    assert(!currentDebugScopeId.empty());
    Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
    inst->reserveOperands(9);
    inst->addIdOperand(nonSemanticShaderDebugInfo);
    inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugLocalVariable);
    inst->addIdOperand(getStringId(name)); // name id
    inst->addIdOperand(type); // type id
    inst->addIdOperand(makeDebugSource(currentFileId)); // source id
    inst->addIdOperand(makeUintConstant(currentLine)); // line id
    inst->addIdOperand(makeUintConstant(0)); // TODO: column id
    inst->addIdOperand(currentDebugScopeId.top()); // scope id
    inst->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsLocal)); // flags id
    // Optional ArgNumber operand, only present for function parameters.
    if(argNumber != 0) {
        inst->addIdOperand(makeUintConstant(static_cast<unsigned int>(argNumber)));
    }
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
    module.mapInstruction(inst);
    return inst->getResultId();
}
  1261. Id Builder::makeDebugExpression()
  1262. {
  1263. if (debugExpression != 0)
  1264. return debugExpression;
  1265. Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1266. inst->reserveOperands(2);
  1267. inst->addIdOperand(nonSemanticShaderDebugInfo);
  1268. inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugExpression);
  1269. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
  1270. module.mapInstruction(inst);
  1271. debugExpression = inst->getResultId();
  1272. return debugExpression;
  1273. }
  1274. Id Builder::makeDebugDeclare(Id const debugLocalVariable, Id const pointer)
  1275. {
  1276. Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1277. inst->reserveOperands(5);
  1278. inst->addIdOperand(nonSemanticShaderDebugInfo);
  1279. inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugDeclare);
  1280. inst->addIdOperand(debugLocalVariable); // debug local variable id
  1281. inst->addIdOperand(pointer); // pointer to local variable id
  1282. inst->addIdOperand(makeDebugExpression()); // expression id
  1283. addInstruction(std::unique_ptr<Instruction>(inst));
  1284. return inst->getResultId();
  1285. }
  1286. Id Builder::makeDebugValue(Id const debugLocalVariable, Id const value)
  1287. {
  1288. Instruction* inst = new Instruction(getUniqueId(), makeVoidType(), Op::OpExtInst);
  1289. inst->reserveOperands(5);
  1290. inst->addIdOperand(nonSemanticShaderDebugInfo);
  1291. inst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugValue);
  1292. inst->addIdOperand(debugLocalVariable); // debug local variable id
  1293. inst->addIdOperand(value); // value of local variable id
  1294. inst->addIdOperand(makeDebugExpression()); // expression id
  1295. addInstruction(std::unique_ptr<Instruction>(inst));
  1296. return inst->getResultId();
  1297. }
  1298. Id Builder::makeAccelerationStructureType()
  1299. {
  1300. Instruction *type;
  1301. if (groupedTypes[enumCast(Op::OpTypeAccelerationStructureKHR)].size() == 0) {
  1302. type = new Instruction(getUniqueId(), NoType, Op::OpTypeAccelerationStructureKHR);
  1303. groupedTypes[enumCast(Op::OpTypeAccelerationStructureKHR)].push_back(type);
  1304. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1305. module.mapInstruction(type);
  1306. if (emitNonSemanticShaderDebugInfo) {
  1307. spv::Id debugType = makeOpaqueDebugType("accelerationStructure");
  1308. debugTypeIdLookup[type->getResultId()] = debugType;
  1309. }
  1310. } else {
  1311. type = groupedTypes[enumCast(Op::OpTypeAccelerationStructureKHR)].back();
  1312. }
  1313. return type->getResultId();
  1314. }
  1315. Id Builder::makeRayQueryType()
  1316. {
  1317. Instruction *type;
  1318. if (groupedTypes[enumCast(Op::OpTypeRayQueryKHR)].size() == 0) {
  1319. type = new Instruction(getUniqueId(), NoType, Op::OpTypeRayQueryKHR);
  1320. groupedTypes[enumCast(Op::OpTypeRayQueryKHR)].push_back(type);
  1321. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1322. module.mapInstruction(type);
  1323. if (emitNonSemanticShaderDebugInfo) {
  1324. spv::Id debugType = makeOpaqueDebugType("rayQuery");
  1325. debugTypeIdLookup[type->getResultId()] = debugType;
  1326. }
  1327. } else {
  1328. type = groupedTypes[enumCast(Op::OpTypeRayQueryKHR)].back();
  1329. }
  1330. return type->getResultId();
  1331. }
  1332. Id Builder::makeHitObjectEXTType()
  1333. {
  1334. Instruction *type;
  1335. if (groupedTypes[enumCast(Op::OpTypeHitObjectEXT)].size() == 0) {
  1336. type = new Instruction(getUniqueId(), NoType, Op::OpTypeHitObjectEXT);
  1337. groupedTypes[enumCast(Op::OpTypeHitObjectEXT)].push_back(type);
  1338. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1339. module.mapInstruction(type);
  1340. } else {
  1341. type = groupedTypes[enumCast(Op::OpTypeHitObjectEXT)].back();
  1342. }
  1343. return type->getResultId();
  1344. }
  1345. Id Builder::makeHitObjectNVType()
  1346. {
  1347. Instruction *type;
  1348. if (groupedTypes[enumCast(Op::OpTypeHitObjectNV)].size() == 0) {
  1349. type = new Instruction(getUniqueId(), NoType, Op::OpTypeHitObjectNV);
  1350. groupedTypes[enumCast(Op::OpTypeHitObjectNV)].push_back(type);
  1351. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  1352. module.mapInstruction(type);
  1353. if (emitNonSemanticShaderDebugInfo) {
  1354. spv::Id debugType = makeOpaqueDebugType("hitObjectNV");
  1355. debugTypeIdLookup[type->getResultId()] = debugType;
  1356. }
  1357. } else {
  1358. type = groupedTypes[enumCast(Op::OpTypeHitObjectNV)].back();
  1359. }
  1360. return type->getResultId();
  1361. }
  1362. Id Builder::getDerefTypeId(Id resultId) const
  1363. {
  1364. Id typeId = getTypeId(resultId);
  1365. assert(isPointerType(typeId));
  1366. return module.getInstruction(typeId)->getIdOperand(1);
  1367. }
  1368. Op Builder::getMostBasicTypeClass(Id typeId) const
  1369. {
  1370. Instruction* instr = module.getInstruction(typeId);
  1371. Op typeClass = instr->getOpCode();
  1372. switch (typeClass)
  1373. {
  1374. case Op::OpTypeVector:
  1375. case Op::OpTypeMatrix:
  1376. case Op::OpTypeArray:
  1377. case Op::OpTypeRuntimeArray:
  1378. return getMostBasicTypeClass(instr->getIdOperand(0));
  1379. case Op::OpTypePointer:
  1380. return getMostBasicTypeClass(instr->getIdOperand(1));
  1381. default:
  1382. return typeClass;
  1383. }
  1384. }
// Return how many constituents a value of type 'typeId' has when constructed
// with OpCompositeConstruct (1 for scalars and pointers).
unsigned int Builder::getNumTypeConstituents(Id typeId) const
{
    Instruction* instr = module.getInstruction(typeId);
    switch (instr->getOpCode())
    {
    case Op::OpTypeBool:
    case Op::OpTypeInt:
    case Op::OpTypeFloat:
    case Op::OpTypePointer:
        // Scalars and pointers are a single constituent.
        return 1;
    case Op::OpTypeVector:
    case Op::OpTypeMatrix:
        // Component/column count is a literal operand of the type itself.
        return instr->getImmediateOperand(1);
    case Op::OpTypeCooperativeVectorNV:
    case Op::OpTypeArray:
    {
        // Length is the id of a constant instruction; read its literal value.
        // NOTE(review): assumes a plain OpConstant length (not a spec
        // constant) -- confirm callers guarantee this.
        Id lengthId = instr->getIdOperand(1);
        return module.getInstruction(lengthId)->getImmediateOperand(0);
    }
    case Op::OpTypeStruct:
        // Every operand of OpTypeStruct is a member type.
        return instr->getNumOperands();
    case Op::OpTypeCooperativeMatrixKHR:
    case Op::OpTypeCooperativeMatrixNV:
        // has only one constituent when used with OpCompositeConstruct.
        return 1;
    default:
        assert(0);
        return 1;
    }
}
  1415. // Return the lowest-level type of scalar that an homogeneous composite is made out of.
  1416. // Typically, this is just to find out if something is made out of ints or floats.
  1417. // However, it includes returning a structure, if say, it is an array of structure.
  1418. Id Builder::getScalarTypeId(Id typeId) const
  1419. {
  1420. Instruction* instr = module.getInstruction(typeId);
  1421. Op typeClass = instr->getOpCode();
  1422. switch (typeClass)
  1423. {
  1424. case Op::OpTypeVoid:
  1425. case Op::OpTypeBool:
  1426. case Op::OpTypeInt:
  1427. case Op::OpTypeFloat:
  1428. case Op::OpTypeStruct:
  1429. return instr->getResultId();
  1430. case Op::OpTypeVector:
  1431. case Op::OpTypeMatrix:
  1432. case Op::OpTypeArray:
  1433. case Op::OpTypeRuntimeArray:
  1434. case Op::OpTypePointer:
  1435. case Op::OpTypeCooperativeVectorNV:
  1436. return getScalarTypeId(getContainedTypeId(typeId));
  1437. default:
  1438. assert(0);
  1439. return NoResult;
  1440. }
  1441. }
// Return the type of 'member' of a composite.
// For vectors/matrices/arrays/cooperative types the element type is operand 0
// and 'member' is ignored; for pointers the pointee is operand 1; for structs
// the operands are the member types, indexed directly by 'member'.
Id Builder::getContainedTypeId(Id typeId, int member) const
{
    Instruction* instr = module.getInstruction(typeId);
    Op typeClass = instr->getOpCode();
    switch (typeClass)
    {
    case Op::OpTypeVector:
    case Op::OpTypeMatrix:
    case Op::OpTypeArray:
    case Op::OpTypeRuntimeArray:
    case Op::OpTypeCooperativeMatrixKHR:
    case Op::OpTypeCooperativeMatrixNV:
    case Op::OpTypeCooperativeVectorNV:
        // Homogeneous: single element type at operand 0.
        return instr->getIdOperand(0);
    case Op::OpTypePointer:
        // Operand 0 is the storage class; the pointee type is operand 1.
        return instr->getIdOperand(1);
    case Op::OpTypeStruct:
        // Member types are the struct's operands.
        return instr->getIdOperand(member);
    default:
        assert(0);
        return NoResult;
    }
}
  1466. // Figure out the final resulting type of the access chain.
  1467. Id Builder::getResultingAccessChainType() const
  1468. {
  1469. assert(accessChain.base != NoResult);
  1470. Id typeId = getTypeId(accessChain.base);
  1471. assert(isPointerType(typeId));
  1472. typeId = getContainedTypeId(typeId);
  1473. for (int i = 0; i < (int)accessChain.indexChain.size(); ++i) {
  1474. if (isStructType(typeId)) {
  1475. assert(isConstantScalar(accessChain.indexChain[i]));
  1476. typeId = getContainedTypeId(typeId, getConstantScalar(accessChain.indexChain[i]));
  1477. } else
  1478. typeId = getContainedTypeId(typeId, accessChain.indexChain[i]);
  1479. }
  1480. return typeId;
  1481. }
// Return the immediately contained type of a given composite type.
// Equivalent to asking for member 0 (the element/pointee type).
Id Builder::getContainedTypeId(Id typeId) const
{
    return getContainedTypeId(typeId, 0);
}
// Returns true if 'typeId' is or contains a scalar type declared with 'typeOp'
// of width 'width'. The 'width' is only consumed for int and float types.
// Returns false otherwise.
bool Builder::containsType(Id typeId, spv::Op typeOp, unsigned int width) const
{
    const Instruction& instr = *module.getInstruction(typeId);
    Op typeClass = instr.getOpCode();
    switch (typeClass)
    {
    case Op::OpTypeInt:
    case Op::OpTypeFloat:
        // Scalar: must match both the opcode and the bit width (operand 0).
        return typeClass == typeOp && instr.getImmediateOperand(0) == width;
    case Op::OpTypeStruct:
        // Recurse into every member type.
        for (int m = 0; m < instr.getNumOperands(); ++m) {
            if (containsType(instr.getIdOperand(m), typeOp, width))
                return true;
        }
        return false;
    case Op::OpTypePointer:
        // Deliberately not followed: containment stops at pointers.
        return false;
    case Op::OpTypeVector:
    case Op::OpTypeMatrix:
    case Op::OpTypeArray:
    case Op::OpTypeRuntimeArray:
        // Homogeneous composites: check the element type.
        return containsType(getContainedTypeId(typeId), typeOp, width);
    default:
        // Other type classes match on opcode alone; 'width' is ignored.
        return typeClass == typeOp;
    }
}
  1516. // return true if the type is a pointer to PhysicalStorageBufferEXT or an
  1517. // contains such a pointer. These require restrict/aliased decorations.
  1518. bool Builder::containsPhysicalStorageBufferOrArray(Id typeId) const
  1519. {
  1520. const Instruction& instr = *module.getInstruction(typeId);
  1521. Op typeClass = instr.getOpCode();
  1522. switch (typeClass)
  1523. {
  1524. case Op::OpTypePointer:
  1525. return getTypeStorageClass(typeId) == StorageClass::PhysicalStorageBufferEXT;
  1526. case Op::OpTypeArray:
  1527. return containsPhysicalStorageBufferOrArray(getContainedTypeId(typeId));
  1528. case Op::OpTypeStruct:
  1529. for (int m = 0; m < instr.getNumOperands(); ++m) {
  1530. if (containsPhysicalStorageBufferOrArray(instr.getIdOperand(m)))
  1531. return true;
  1532. }
  1533. return false;
  1534. default:
  1535. return false;
  1536. }
  1537. }
  1538. // See if a scalar constant of this type has already been created, so it
  1539. // can be reused rather than duplicated. (Required by the specification).
  1540. Id Builder::findScalarConstant(Op typeClass, Op opcode, Id typeId, unsigned value)
  1541. {
  1542. ScalarConstantKey key{ enumCast(typeClass), enumCast(opcode), typeId, value, 0 };
  1543. auto it = groupedScalarConstantResultIDs.find(key);
  1544. return (it != groupedScalarConstantResultIDs.end()) ? it->second : 0;
  1545. }
  1546. // Version of findScalarConstant (see above) for scalars that take two operands (e.g. a 'double' or 'int64').
  1547. Id Builder::findScalarConstant(Op typeClass, Op opcode, Id typeId, unsigned v1, unsigned v2)
  1548. {
  1549. ScalarConstantKey key{ enumCast(typeClass), enumCast(opcode), typeId, v1, v2 };
  1550. auto it = groupedScalarConstantResultIDs.find(key);
  1551. return (it != groupedScalarConstantResultIDs.end()) ? it->second : 0;
  1552. }
// Return true if consuming 'opcode' means consuming a constant.
// "constant" here means after final transform to executable code,
// the value consumed will be a constant, so includes specialization.
bool Builder::isConstantOpCode(Op opcode) const
{
    switch (opcode) {
    // OpUndef yields an unspecified-but-fixed value, so it counts here.
    case Op::OpUndef:
    case Op::OpConstantTrue:
    case Op::OpConstantFalse:
    case Op::OpConstant:
    case Op::OpConstantComposite:
    case Op::OpConstantCompositeReplicateEXT:
    case Op::OpConstantSampler:
    case Op::OpConstantNull:
    // Specialization constants are constants by final-execution time.
    case Op::OpSpecConstantTrue:
    case Op::OpSpecConstantFalse:
    case Op::OpSpecConstant:
    case Op::OpSpecConstantComposite:
    case Op::OpSpecConstantCompositeReplicateEXT:
    case Op::OpSpecConstantOp:
    case Op::OpConstantSizeOfEXT:
        return true;
    default:
        return false;
    }
}
  1579. // Return true if consuming 'opcode' means consuming a specialization constant.
  1580. bool Builder::isSpecConstantOpCode(Op opcode) const
  1581. {
  1582. switch (opcode) {
  1583. case Op::OpSpecConstantTrue:
  1584. case Op::OpSpecConstantFalse:
  1585. case Op::OpSpecConstant:
  1586. case Op::OpSpecConstantComposite:
  1587. case Op::OpSpecConstantOp:
  1588. case Op::OpSpecConstantCompositeReplicateEXT:
  1589. return true;
  1590. default:
  1591. return false;
  1592. }
  1593. }
  1594. Id Builder::makeNullConstant(Id typeId)
  1595. {
  1596. Instruction* constant;
  1597. // See if we already made it.
  1598. Id existing = NoResult;
  1599. for (int i = 0; i < (int)nullConstants.size(); ++i) {
  1600. constant = nullConstants[i];
  1601. if (constant->getTypeId() == typeId)
  1602. existing = constant->getResultId();
  1603. }
  1604. if (existing != NoResult)
  1605. return existing;
  1606. // Make it
  1607. Instruction* c = new Instruction(getUniqueId(), typeId, Op::OpConstantNull);
  1608. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1609. nullConstants.push_back(c);
  1610. module.mapInstruction(c);
  1611. return c->getResultId();
  1612. }
  1613. Id Builder::makeBoolConstant(bool b, bool specConstant)
  1614. {
  1615. Id typeId = makeBoolType();
  1616. Op opcode = specConstant ? (b ? Op::OpSpecConstantTrue : Op::OpSpecConstantFalse) : (b ? Op::OpConstantTrue : Op::OpConstantFalse);
  1617. // See if we already made it. Applies only to regular constants, because specialization constants
  1618. // must remain distinct for the purpose of applying a SpecId decoration.
  1619. if (!specConstant) {
  1620. Id existing = findScalarConstant(Op::OpTypeBool, opcode, typeId, 0);
  1621. if (existing)
  1622. return existing;
  1623. }
  1624. // Make it
  1625. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1626. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1627. module.mapInstruction(c);
  1628. Id resultId = c->getResultId();
  1629. if (!specConstant) {
  1630. ScalarConstantKey key{enumCast(Op::OpTypeBool), enumCast(opcode), typeId, 0, 0};
  1631. groupedScalarConstantResultIDs[key] = resultId;
  1632. }
  1633. return resultId;
  1634. }
  1635. Id Builder::makeIntConstant(Id typeId, unsigned value, bool specConstant)
  1636. {
  1637. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1638. // See if we already made it. Applies only to regular constants, because specialization constants
  1639. // must remain distinct for the purpose of applying a SpecId decoration.
  1640. if (! specConstant) {
  1641. Id existing = findScalarConstant(Op::OpTypeInt, opcode, typeId, value);
  1642. if (existing)
  1643. return existing;
  1644. }
  1645. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1646. c->addImmediateOperand(value);
  1647. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1648. module.mapInstruction(c);
  1649. Id resultId = c->getResultId();
  1650. if (!specConstant) {
  1651. ScalarConstantKey key{ enumCast(Op::OpTypeInt), enumCast(opcode), typeId, value, 0 };
  1652. groupedScalarConstantResultIDs[key] = resultId;
  1653. }
  1654. return resultId;
  1655. }
  1656. Id Builder::makeInt64Constant(Id typeId, unsigned long long value, bool specConstant)
  1657. {
  1658. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1659. unsigned op1 = value & 0xFFFFFFFF;
  1660. unsigned op2 = value >> 32;
  1661. // See if we already made it. Applies only to regular constants, because specialization constants
  1662. // must remain distinct for the purpose of applying a SpecId decoration.
  1663. if (! specConstant) {
  1664. Id existing = findScalarConstant(Op::OpTypeInt, opcode, typeId, op1, op2);
  1665. if (existing)
  1666. return existing;
  1667. }
  1668. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1669. c->reserveOperands(2);
  1670. c->addImmediateOperand(op1);
  1671. c->addImmediateOperand(op2);
  1672. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1673. module.mapInstruction(c);
  1674. Id resultId = c->getResultId();
  1675. if (!specConstant) {
  1676. ScalarConstantKey key{ enumCast(Op::OpTypeInt), enumCast(opcode), typeId, op1, op2 };
  1677. groupedScalarConstantResultIDs[key] = resultId;
  1678. }
  1679. return resultId;
  1680. }
// Return (creating if needed) a 32-bit float constant, as a regular or
// specialization constant.
Id Builder::makeFloatConstant(float f, bool specConstant)
{
    Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
    Id typeId = makeFloatType(32);
    // Reinterpret the float's bit pattern as the 32-bit literal word.
    // NOTE(review): union type punning is well-defined in C but technically UB
    // in C++; supported toolchains honor it, but memcpy/std::bit_cast would be
    // the strictly conforming alternative.
    union { float fl; unsigned int ui; } u;
    u.fl = f;
    unsigned value = u.ui;
    // See if we already made it. Applies only to regular constants, because specialization constants
    // must remain distinct for the purpose of applying a SpecId decoration.
    if (! specConstant) {
        Id existing = findScalarConstant(Op::OpTypeFloat, opcode, typeId, value);
        if (existing)
            return existing;
    }
    Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
    c->addImmediateOperand(value);
    constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
    module.mapInstruction(c);
    Id resultId = c->getResultId();
    if (!specConstant) {
        ScalarConstantKey key{ enumCast(Op::OpTypeFloat), enumCast(opcode), typeId, value, 0 };
        groupedScalarConstantResultIDs[key] = resultId;
    }
    return resultId;
}
  1706. Id Builder::makeDoubleConstant(double d, bool specConstant)
  1707. {
  1708. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1709. Id typeId = makeFloatType(64);
  1710. union { double db; unsigned long long ull; } u;
  1711. u.db = d;
  1712. unsigned long long value = u.ull;
  1713. unsigned op1 = value & 0xFFFFFFFF;
  1714. unsigned op2 = value >> 32;
  1715. // See if we already made it. Applies only to regular constants, because specialization constants
  1716. // must remain distinct for the purpose of applying a SpecId decoration.
  1717. if (! specConstant) {
  1718. Id existing = findScalarConstant(Op::OpTypeFloat, opcode, typeId, op1, op2);
  1719. if (existing)
  1720. return existing;
  1721. }
  1722. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1723. c->reserveOperands(2);
  1724. c->addImmediateOperand(op1);
  1725. c->addImmediateOperand(op2);
  1726. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1727. module.mapInstruction(c);
  1728. Id resultId = c->getResultId();
  1729. if (!specConstant) {
  1730. ScalarConstantKey key{ enumCast(Op::OpTypeFloat), enumCast(opcode), typeId, op1, op2 };
  1731. groupedScalarConstantResultIDs[key] = resultId;
  1732. }
  1733. return resultId;
  1734. }
  1735. Id Builder::makeFloat16Constant(float f16, bool specConstant)
  1736. {
  1737. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1738. Id typeId = makeFloatType(16);
  1739. spvutils::HexFloat<spvutils::FloatProxy<float>> fVal(f16);
  1740. spvutils::HexFloat<spvutils::FloatProxy<spvutils::Float16>> f16Val(0);
  1741. fVal.castTo(f16Val, spvutils::kRoundToZero);
  1742. unsigned value = f16Val.value().getAsFloat().get_value();
  1743. // See if we already made it. Applies only to regular constants, because specialization constants
  1744. // must remain distinct for the purpose of applying a SpecId decoration.
  1745. if (!specConstant) {
  1746. Id existing = findScalarConstant(Op::OpTypeFloat, opcode, typeId, value);
  1747. if (existing)
  1748. return existing;
  1749. }
  1750. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1751. c->addImmediateOperand(value);
  1752. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1753. module.mapInstruction(c);
  1754. Id resultId = c->getResultId();
  1755. if (!specConstant) {
  1756. ScalarConstantKey key{ enumCast(Op::OpTypeFloat), enumCast(opcode), typeId, value, 0 };
  1757. groupedScalarConstantResultIDs[key] = resultId;
  1758. }
  1759. return resultId;
  1760. }
  1761. Id Builder::makeBFloat16Constant(float bf16, bool specConstant)
  1762. {
  1763. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1764. Id typeId = makeBFloat16Type();
  1765. union {
  1766. float f;
  1767. uint32_t u;
  1768. } un;
  1769. un.f = bf16;
  1770. // take high 16b of fp32 value. This is effectively round-to-zero, other than certain NaNs.
  1771. unsigned value = un.u >> 16;
  1772. // See if we already made it. Applies only to regular constants, because specialization constants
  1773. // must remain distinct for the purpose of applying a SpecId decoration.
  1774. if (!specConstant) {
  1775. Id existing = findScalarConstant(Op::OpTypeFloat, opcode, typeId, value);
  1776. if (existing)
  1777. return existing;
  1778. }
  1779. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1780. c->addImmediateOperand(value);
  1781. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1782. module.mapInstruction(c);
  1783. Id resultId = c->getResultId();
  1784. if (!specConstant) {
  1785. ScalarConstantKey key{ enumCast(Op::OpTypeFloat), enumCast(opcode), typeId, value, 0 };
  1786. groupedScalarConstantResultIDs[key] = resultId;
  1787. }
  1788. return resultId;
  1789. }
  1790. Id Builder::makeFloatE5M2Constant(float fe5m2, bool specConstant)
  1791. {
  1792. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1793. Id typeId = makeFloatE5M2Type();
  1794. spvutils::HexFloat<spvutils::FloatProxy<float>> fVal(fe5m2);
  1795. spvutils::HexFloat<spvutils::FloatProxy<spvutils::FloatE5M2>> fe5m2Val(0);
  1796. fVal.castTo(fe5m2Val, spvutils::kRoundToZero);
  1797. unsigned value = fe5m2Val.value().getAsFloat().get_value();
  1798. // See if we already made it. Applies only to regular constants, because specialization constants
  1799. // must remain distinct for the purpose of applying a SpecId decoration.
  1800. if (!specConstant) {
  1801. Id existing = findScalarConstant(Op::OpTypeFloat, opcode, typeId, value);
  1802. if (existing)
  1803. return existing;
  1804. }
  1805. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1806. c->addImmediateOperand(value);
  1807. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1808. module.mapInstruction(c);
  1809. Id resultId = c->getResultId();
  1810. if (!specConstant) {
  1811. ScalarConstantKey key{enumCast(Op::OpTypeFloat), enumCast(opcode), typeId, value, 0};
  1812. groupedScalarConstantResultIDs[key] = resultId;
  1813. }
  1814. return resultId;
  1815. }
  1816. Id Builder::makeFloatE4M3Constant(float fe4m3, bool specConstant)
  1817. {
  1818. Op opcode = specConstant ? Op::OpSpecConstant : Op::OpConstant;
  1819. Id typeId = makeFloatE4M3Type();
  1820. spvutils::HexFloat<spvutils::FloatProxy<float>> fVal(fe4m3);
  1821. spvutils::HexFloat<spvutils::FloatProxy<spvutils::FloatE4M3>> fe4m3Val(0);
  1822. fVal.castTo(fe4m3Val, spvutils::kRoundToZero);
  1823. unsigned value = fe4m3Val.value().getAsFloat().get_value();
  1824. // See if we already made it. Applies only to regular constants, because specialization constants
  1825. // must remain distinct for the purpose of applying a SpecId decoration.
  1826. if (!specConstant) {
  1827. Id existing = findScalarConstant(Op::OpTypeFloat, opcode, typeId, value);
  1828. if (existing)
  1829. return existing;
  1830. }
  1831. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1832. c->addImmediateOperand(value);
  1833. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1834. module.mapInstruction(c);
  1835. Id resultId = c->getResultId();
  1836. if (!specConstant) {
  1837. ScalarConstantKey key{enumCast(Op::OpTypeFloat), enumCast(opcode), typeId, value, 0};
  1838. groupedScalarConstantResultIDs[key] = resultId;
  1839. }
  1840. return resultId;
  1841. }
  1842. Id Builder::makeFpConstant(Id type, double d, bool specConstant)
  1843. {
  1844. const int width = getScalarTypeWidth(type);
  1845. assert(isFloatType(type));
  1846. switch (width) {
  1847. case 16:
  1848. return makeFloat16Constant((float)d, specConstant);
  1849. case 32:
  1850. return makeFloatConstant((float)d, specConstant);
  1851. case 64:
  1852. return makeDoubleConstant(d, specConstant);
  1853. default:
  1854. break;
  1855. }
  1856. assert(false);
  1857. return NoResult;
  1858. }
  1859. Id Builder::importNonSemanticShaderDebugInfoInstructions()
  1860. {
  1861. assert(emitNonSemanticShaderDebugInfo == true);
  1862. if(nonSemanticShaderDebugInfo == 0)
  1863. {
  1864. this->addExtension(spv::E_SPV_KHR_non_semantic_info);
  1865. nonSemanticShaderDebugInfo = this->import("NonSemantic.Shader.DebugInfo.100");
  1866. }
  1867. return nonSemanticShaderDebugInfo;
  1868. }
  1869. Id Builder::findCompositeConstant(Op typeClass, Op opcode, Id typeId, const std::vector<Id>& comps, size_t numMembers)
  1870. {
  1871. Instruction* constant = nullptr;
  1872. bool found = false;
  1873. for (int i = 0; i < (int)groupedCompositeConstants[enumCast(typeClass)].size(); ++i) {
  1874. constant = groupedCompositeConstants[enumCast(typeClass)][i];
  1875. if (constant->getTypeId() != typeId)
  1876. continue;
  1877. if (constant->getOpCode() != opcode) {
  1878. continue;
  1879. }
  1880. if (constant->getNumOperands() != (int)numMembers)
  1881. continue;
  1882. // same contents?
  1883. bool mismatch = false;
  1884. for (int op = 0; op < constant->getNumOperands(); ++op) {
  1885. if (constant->getIdOperand(op) != comps[op]) {
  1886. mismatch = true;
  1887. break;
  1888. }
  1889. }
  1890. if (! mismatch) {
  1891. found = true;
  1892. break;
  1893. }
  1894. }
  1895. return found ? constant->getResultId() : NoResult;
  1896. }
  1897. Id Builder::findStructConstant(Id typeId, const std::vector<Id>& comps)
  1898. {
  1899. Instruction* constant = nullptr;
  1900. bool found = false;
  1901. for (int i = 0; i < (int)groupedStructConstants[typeId].size(); ++i) {
  1902. constant = groupedStructConstants[typeId][i];
  1903. // same contents?
  1904. bool mismatch = false;
  1905. for (int op = 0; op < constant->getNumOperands(); ++op) {
  1906. if (constant->getIdOperand(op) != comps[op]) {
  1907. mismatch = true;
  1908. break;
  1909. }
  1910. }
  1911. if (! mismatch) {
  1912. found = true;
  1913. break;
  1914. }
  1915. }
  1916. return found ? constant->getResultId() : NoResult;
  1917. }
  1918. // Comments in header
  1919. Id Builder::makeCompositeConstant(Id typeId, const std::vector<Id>& members, bool specConstant)
  1920. {
  1921. assert(typeId);
  1922. Op typeClass = getTypeClass(typeId);
  1923. bool replicate = false;
  1924. size_t numMembers = members.size();
  1925. if (useReplicatedComposites || typeClass == Op::OpTypeCooperativeVectorNV) {
  1926. // use replicate if all members are the same
  1927. replicate = numMembers > 0 &&
  1928. std::equal(members.begin() + 1, members.end(), members.begin());
  1929. if (replicate) {
  1930. numMembers = 1;
  1931. addCapability(spv::Capability::ReplicatedCompositesEXT);
  1932. addExtension(spv::E_SPV_EXT_replicated_composites);
  1933. }
  1934. }
  1935. Op opcode = replicate ?
  1936. (specConstant ? Op::OpSpecConstantCompositeReplicateEXT : Op::OpConstantCompositeReplicateEXT) :
  1937. (specConstant ? Op::OpSpecConstantComposite : Op::OpConstantComposite);
  1938. switch (typeClass) {
  1939. case Op::OpTypeVector:
  1940. case Op::OpTypeArray:
  1941. case Op::OpTypeMatrix:
  1942. case Op::OpTypeCooperativeMatrixKHR:
  1943. case Op::OpTypeCooperativeMatrixNV:
  1944. case Op::OpTypeCooperativeVectorNV:
  1945. if (! specConstant) {
  1946. Id existing = findCompositeConstant(typeClass, opcode, typeId, members, numMembers);
  1947. if (existing)
  1948. return existing;
  1949. }
  1950. break;
  1951. case Op::OpTypeStruct:
  1952. if (! specConstant) {
  1953. Id existing = findStructConstant(typeId, members);
  1954. if (existing)
  1955. return existing;
  1956. }
  1957. break;
  1958. default:
  1959. assert(0);
  1960. return makeFloatConstant(0.0);
  1961. }
  1962. Instruction* c = new Instruction(getUniqueId(), typeId, opcode);
  1963. c->reserveOperands(members.size());
  1964. for (size_t op = 0; op < numMembers; ++op)
  1965. c->addIdOperand(members[op]);
  1966. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(c));
  1967. if (typeClass == Op::OpTypeStruct)
  1968. groupedStructConstants[typeId].push_back(c);
  1969. else
  1970. groupedCompositeConstants[enumCast(typeClass)].push_back(c);
  1971. module.mapInstruction(c);
  1972. return c->getResultId();
  1973. }
  1974. Instruction* Builder::addEntryPoint(ExecutionModel model, Function* function, const char* name)
  1975. {
  1976. Instruction* entryPoint = new Instruction(Op::OpEntryPoint);
  1977. entryPoint->reserveOperands(3);
  1978. entryPoint->addImmediateOperand(model);
  1979. entryPoint->addIdOperand(function->getId());
  1980. entryPoint->addStringOperand(name);
  1981. entryPoints.push_back(std::unique_ptr<Instruction>(entryPoint));
  1982. return entryPoint;
  1983. }
  1984. // Currently relying on the fact that all 'value' of interest are small non-negative values.
  1985. void Builder::addExecutionMode(Function* entryPoint, ExecutionMode mode, int value1, int value2, int value3)
  1986. {
  1987. // entryPoint can be null if we are in compile-only mode
  1988. if (!entryPoint)
  1989. return;
  1990. Instruction* instr = new Instruction(Op::OpExecutionMode);
  1991. instr->reserveOperands(3);
  1992. instr->addIdOperand(entryPoint->getId());
  1993. instr->addImmediateOperand(mode);
  1994. if (value1 >= 0)
  1995. instr->addImmediateOperand(value1);
  1996. if (value2 >= 0)
  1997. instr->addImmediateOperand(value2);
  1998. if (value3 >= 0)
  1999. instr->addImmediateOperand(value3);
  2000. executionModes.push_back(std::unique_ptr<Instruction>(instr));
  2001. }
  2002. void Builder::addExecutionMode(Function* entryPoint, ExecutionMode mode, const std::vector<unsigned>& literals)
  2003. {
  2004. // entryPoint can be null if we are in compile-only mode
  2005. if (!entryPoint)
  2006. return;
  2007. Instruction* instr = new Instruction(Op::OpExecutionMode);
  2008. instr->reserveOperands(literals.size() + 2);
  2009. instr->addIdOperand(entryPoint->getId());
  2010. instr->addImmediateOperand(mode);
  2011. for (auto literal : literals)
  2012. instr->addImmediateOperand(literal);
  2013. executionModes.push_back(std::unique_ptr<Instruction>(instr));
  2014. }
  2015. void Builder::addExecutionModeId(Function* entryPoint, ExecutionMode mode, const std::vector<Id>& operandIds)
  2016. {
  2017. // entryPoint can be null if we are in compile-only mode
  2018. if (!entryPoint)
  2019. return;
  2020. Instruction* instr = new Instruction(Op::OpExecutionModeId);
  2021. instr->reserveOperands(operandIds.size() + 2);
  2022. instr->addIdOperand(entryPoint->getId());
  2023. instr->addImmediateOperand(mode);
  2024. for (auto operandId : operandIds)
  2025. instr->addIdOperand(operandId);
  2026. executionModes.push_back(std::unique_ptr<Instruction>(instr));
  2027. }
  2028. void Builder::addName(Id id, const char* string)
  2029. {
  2030. Instruction* name = new Instruction(Op::OpName);
  2031. name->reserveOperands(2);
  2032. name->addIdOperand(id);
  2033. name->addStringOperand(string);
  2034. names.push_back(std::unique_ptr<Instruction>(name));
  2035. }
  2036. void Builder::addMemberName(Id id, int memberNumber, const char* string)
  2037. {
  2038. Instruction* name = new Instruction(Op::OpMemberName);
  2039. name->reserveOperands(3);
  2040. name->addIdOperand(id);
  2041. name->addImmediateOperand(memberNumber);
  2042. name->addStringOperand(string);
  2043. names.push_back(std::unique_ptr<Instruction>(name));
  2044. }
  2045. void Builder::addDecoration(Id id, Decoration decoration, int num)
  2046. {
  2047. if (decoration == spv::Decoration::Max)
  2048. return;
  2049. Instruction* dec = new Instruction(Op::OpDecorate);
  2050. dec->reserveOperands(2);
  2051. dec->addIdOperand(id);
  2052. dec->addImmediateOperand(decoration);
  2053. if (num >= 0)
  2054. dec->addImmediateOperand(num);
  2055. decorations.insert(std::unique_ptr<Instruction>(dec));
  2056. }
  2057. void Builder::addDecoration(Id id, Decoration decoration, const char* s)
  2058. {
  2059. if (decoration == spv::Decoration::Max)
  2060. return;
  2061. Instruction* dec = new Instruction(Op::OpDecorateString);
  2062. dec->reserveOperands(3);
  2063. dec->addIdOperand(id);
  2064. dec->addImmediateOperand(decoration);
  2065. dec->addStringOperand(s);
  2066. decorations.insert(std::unique_ptr<Instruction>(dec));
  2067. }
  2068. void Builder::addDecoration(Id id, Decoration decoration, const std::vector<unsigned>& literals)
  2069. {
  2070. if (decoration == spv::Decoration::Max)
  2071. return;
  2072. Instruction* dec = new Instruction(Op::OpDecorate);
  2073. dec->reserveOperands(literals.size() + 2);
  2074. dec->addIdOperand(id);
  2075. dec->addImmediateOperand(decoration);
  2076. for (auto literal : literals)
  2077. dec->addImmediateOperand(literal);
  2078. decorations.insert(std::unique_ptr<Instruction>(dec));
  2079. }
  2080. void Builder::addDecoration(Id id, Decoration decoration, const std::vector<const char*>& strings)
  2081. {
  2082. if (decoration == spv::Decoration::Max)
  2083. return;
  2084. Instruction* dec = new Instruction(Op::OpDecorateString);
  2085. dec->reserveOperands(strings.size() + 2);
  2086. dec->addIdOperand(id);
  2087. dec->addImmediateOperand(decoration);
  2088. for (auto string : strings)
  2089. dec->addStringOperand(string);
  2090. decorations.insert(std::unique_ptr<Instruction>(dec));
  2091. }
  2092. void Builder::addLinkageDecoration(Id id, const char* name, spv::LinkageType linkType) {
  2093. Instruction* dec = new Instruction(Op::OpDecorate);
  2094. dec->reserveOperands(4);
  2095. dec->addIdOperand(id);
  2096. dec->addImmediateOperand(spv::Decoration::LinkageAttributes);
  2097. dec->addStringOperand(name);
  2098. dec->addImmediateOperand(linkType);
  2099. decorations.insert(std::unique_ptr<Instruction>(dec));
  2100. }
  2101. void Builder::addDecorationId(Id id, Decoration decoration, Id idDecoration)
  2102. {
  2103. if (decoration == spv::Decoration::Max)
  2104. return;
  2105. Instruction* dec = new Instruction(Op::OpDecorateId);
  2106. dec->reserveOperands(3);
  2107. dec->addIdOperand(id);
  2108. dec->addImmediateOperand(decoration);
  2109. dec->addIdOperand(idDecoration);
  2110. decorations.insert(std::unique_ptr<Instruction>(dec));
  2111. }
  2112. void Builder::addDecorationId(Id id, Decoration decoration, const std::vector<Id>& operandIds)
  2113. {
  2114. if(decoration == spv::Decoration::Max)
  2115. return;
  2116. Instruction* dec = new Instruction(Op::OpDecorateId);
  2117. dec->reserveOperands(operandIds.size() + 2);
  2118. dec->addIdOperand(id);
  2119. dec->addImmediateOperand(decoration);
  2120. for (auto operandId : operandIds)
  2121. dec->addIdOperand(operandId);
  2122. decorations.insert(std::unique_ptr<Instruction>(dec));
  2123. }
  2124. void Builder::addMemberDecorationIdEXT(Id id, unsigned int member, Decoration decoration,
  2125. const std::vector<unsigned>& operands)
  2126. {
  2127. if (decoration == spv::Decoration::Max)
  2128. return;
  2129. Instruction* dec = new Instruction(Op::OpMemberDecorateIdEXT);
  2130. dec->reserveOperands(operands.size() + 3);
  2131. dec->addIdOperand(id);
  2132. dec->addImmediateOperand(member);
  2133. dec->addImmediateOperand(decoration);
  2134. for (auto operand : operands)
  2135. dec->addIdOperand(operand);
  2136. decorations.insert(std::unique_ptr<Instruction>(dec));
  2137. }
  2138. void Builder::addMemberDecoration(Id id, unsigned int member, Decoration decoration, int num)
  2139. {
  2140. if (decoration == spv::Decoration::Max)
  2141. return;
  2142. Instruction* dec = new Instruction(Op::OpMemberDecorate);
  2143. dec->reserveOperands(3);
  2144. dec->addIdOperand(id);
  2145. dec->addImmediateOperand(member);
  2146. dec->addImmediateOperand(decoration);
  2147. if (num >= 0)
  2148. dec->addImmediateOperand(num);
  2149. decorations.insert(std::unique_ptr<Instruction>(dec));
  2150. }
  2151. void Builder::addMemberDecoration(Id id, unsigned int member, Decoration decoration, const char *s)
  2152. {
  2153. if (decoration == spv::Decoration::Max)
  2154. return;
  2155. Instruction* dec = new Instruction(Op::OpMemberDecorateStringGOOGLE);
  2156. dec->reserveOperands(4);
  2157. dec->addIdOperand(id);
  2158. dec->addImmediateOperand(member);
  2159. dec->addImmediateOperand(decoration);
  2160. dec->addStringOperand(s);
  2161. decorations.insert(std::unique_ptr<Instruction>(dec));
  2162. }
  2163. void Builder::addMemberDecoration(Id id, unsigned int member, Decoration decoration, const std::vector<unsigned>& literals)
  2164. {
  2165. if (decoration == spv::Decoration::Max)
  2166. return;
  2167. Instruction* dec = new Instruction(Op::OpMemberDecorate);
  2168. dec->reserveOperands(literals.size() + 3);
  2169. dec->addIdOperand(id);
  2170. dec->addImmediateOperand(member);
  2171. dec->addImmediateOperand(decoration);
  2172. for (auto literal : literals)
  2173. dec->addImmediateOperand(literal);
  2174. decorations.insert(std::unique_ptr<Instruction>(dec));
  2175. }
  2176. void Builder::addMemberDecoration(Id id, unsigned int member, Decoration decoration, const std::vector<const char*>& strings)
  2177. {
  2178. if (decoration == spv::Decoration::Max)
  2179. return;
  2180. Instruction* dec = new Instruction(Op::OpMemberDecorateString);
  2181. dec->reserveOperands(strings.size() + 3);
  2182. dec->addIdOperand(id);
  2183. dec->addImmediateOperand(member);
  2184. dec->addImmediateOperand(decoration);
  2185. for (auto string : strings)
  2186. dec->addStringOperand(string);
  2187. decorations.insert(std::unique_ptr<Instruction>(dec));
  2188. }
// Append 'inst' to the current build point, first emitting any pending debug
// bookkeeping (DebugScope, OpLine/DebugLine) that the dirty-tracker flags say
// is outstanding. OpPhi is exempt: SPIR-V requires phis to lead their block.
void Builder::addInstruction(std::unique_ptr<Instruction> inst) {
    // Phis must appear first in their block, don't insert line tracking instructions
    // in front of them, just add the OpPhi and return.
    if (inst->getOpCode() == Op::OpPhi) {
        buildPoint->addInstruction(std::move(inst));
        return;
    }
    // Optionally insert OpDebugScope
    // dirtyScopeTracker is set whenever the lexical scope stack changed; only
    // emit a DebugScope if the block's recorded scope actually differs.
    if (emitNonSemanticShaderDebugInfo && dirtyScopeTracker) {
        if (buildPoint->updateDebugScope(currentDebugScopeId.top())) {
            auto scopeInst = std::make_unique<Instruction>(getUniqueId(), makeVoidType(), Op::OpExtInst);
            scopeInst->reserveOperands(3);
            scopeInst->addIdOperand(nonSemanticShaderDebugInfo);
            scopeInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugScope);
            scopeInst->addIdOperand(currentDebugScopeId.top());
            buildPoint->addInstruction(std::move(scopeInst));
        }
        dirtyScopeTracker = false;
    }
    // Insert OpLine/OpDebugLine if the debug source location has changed
    if (trackDebugInfo && dirtyLineTracker) {
        if (buildPoint->updateDebugSourceLocation(currentLine, 0, currentFileId)) {
            // Classic OpLine (column is always emitted as 0 here).
            if (emitSpirvDebugInfo) {
                auto lineInst = std::make_unique<Instruction>(Op::OpLine);
                lineInst->reserveOperands(3);
                lineInst->addIdOperand(currentFileId);
                lineInst->addImmediateOperand(currentLine);
                lineInst->addImmediateOperand(0);
                buildPoint->addInstruction(std::move(lineInst));
            }
            // NonSemantic DebugLine: source, lineStart, lineEnd, colStart, colEnd
            // (start and end lines are the same; columns are 0).
            if (emitNonSemanticShaderDebugInfo) {
                auto lineInst = std::make_unique<Instruction>(getUniqueId(), makeVoidType(), Op::OpExtInst);
                lineInst->reserveOperands(7);
                lineInst->addIdOperand(nonSemanticShaderDebugInfo);
                lineInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugLine);
                lineInst->addIdOperand(makeDebugSource(currentFileId));
                lineInst->addIdOperand(makeUintConstant(currentLine));
                lineInst->addIdOperand(makeUintConstant(currentLine));
                lineInst->addIdOperand(makeUintConstant(0));
                lineInst->addIdOperand(makeUintConstant(0));
                buildPoint->addInstruction(std::move(lineInst));
            }
        }
        dirtyLineTracker = false;
    }
    // Finally the instruction itself, after any debug bookkeeping.
    buildPoint->addInstruction(std::move(inst));
}
  2236. void Builder::addInstructionNoDebugInfo(std::unique_ptr<Instruction> inst) {
  2237. buildPoint->addInstruction(std::move(inst));
  2238. }
  2239. // Comments in header
  2240. Function* Builder::makeEntryPoint(const char* entryPoint)
  2241. {
  2242. assert(! entryPointFunction);
  2243. auto const returnType = makeVoidType();
  2244. restoreNonSemanticShaderDebugInfo = emitNonSemanticShaderDebugInfo;
  2245. if(sourceLang == spv::SourceLanguage::HLSL) {
  2246. emitNonSemanticShaderDebugInfo = false;
  2247. }
  2248. Block* entry = nullptr;
  2249. entryPointFunction = makeFunctionEntry(NoPrecision, returnType, entryPoint, LinkageType::Max, {}, {}, &entry);
  2250. emitNonSemanticShaderDebugInfo = restoreNonSemanticShaderDebugInfo;
  2251. return entryPointFunction;
  2252. }
  2253. // Comments in header
  2254. Function* Builder::makeFunctionEntry(Decoration precision, Id returnType, const char* name, LinkageType linkType,
  2255. const std::vector<Id>& paramTypes,
  2256. const std::vector<std::vector<Decoration>>& decorations, Block** entry)
  2257. {
  2258. // Make the function and initial instructions in it
  2259. Id typeId = makeFunctionType(returnType, paramTypes);
  2260. Id firstParamId = paramTypes.size() == 0 ? 0 : getUniqueIds((int)paramTypes.size());
  2261. Id funcId = getUniqueId();
  2262. Function* function = new Function(funcId, returnType, typeId, firstParamId, linkType, name, module);
  2263. // Set up the precisions
  2264. setPrecision(function->getId(), precision);
  2265. function->setReturnPrecision(precision);
  2266. for (unsigned p = 0; p < (unsigned)decorations.size(); ++p) {
  2267. for (int d = 0; d < (int)decorations[p].size(); ++d) {
  2268. addDecoration(firstParamId + p, decorations[p][d]);
  2269. function->addParamPrecision(p, decorations[p][d]);
  2270. }
  2271. }
  2272. // reset last debug scope
  2273. if (emitNonSemanticShaderDebugInfo) {
  2274. dirtyScopeTracker = true;
  2275. }
  2276. // CFG
  2277. assert(entry != nullptr);
  2278. *entry = new Block(getUniqueId(), *function);
  2279. function->addBlock(*entry);
  2280. setBuildPoint(*entry);
  2281. if (name)
  2282. addName(function->getId(), name);
  2283. functions.push_back(std::unique_ptr<Function>(function));
  2284. return function;
  2285. }
// Emit the NonSemantic DebugFunction for 'function' and a DebugDeclare/
// DebugValue for each parameter. Temporarily pushes the new debug function
// onto the scope stack so the parameter debug instructions land in it.
// No-op unless nonsemantic shader debug info is enabled.
void Builder::setupFunctionDebugInfo(Function* function, const char* name, const std::vector<Id>& paramTypes,
                                     const std::vector<char const*>& paramNames)
{
    if (!emitNonSemanticShaderDebugInfo)
        return;

    Id nameId = getStringId(unmangleFunctionName(name));
    Id funcTypeId = function->getFuncTypeId();
    assert(getDebugType(funcTypeId) != NoType);
    Id funcId = function->getId();
    assert(funcId != 0);

    // Make the debug function instruction
    Id debugFuncId = makeDebugFunction(function, nameId, funcTypeId);
    debugFuncIdLookup[funcId] = debugFuncId;
    currentDebugScopeId.push(debugFuncId);

    // DebugScope and DebugLine for parameter DebugDeclares
    assert(paramTypes.size() == paramNames.size());
    if ((int)paramTypes.size() > 0) {
        Id firstParamId = function->getParamId(0);

        for (size_t p = 0; p < paramTypes.size(); ++p) {
            bool passByRef = false;
            Id paramTypeId = paramTypes[p];

            // For pointer-typed parameters, they are actually passed by reference and we need unwrap the pointer to get the actual parameter type.
            if (isPointerType(paramTypeId) || isArrayType(paramTypeId)) {
                passByRef = true;
                paramTypeId = getContainedTypeId(paramTypeId);
            }

            auto const& paramName = paramNames[p];
            // Argument numbers for DebugLocalVariable are 1-based, hence p + 1.
            auto const debugLocalVariableId = createDebugLocalVariable(getDebugType(paramTypeId), paramName, p + 1);
            auto const paramId = static_cast<Id>(firstParamId + p);

            // By-reference parameters get a DebugDeclare (address tracking);
            // by-value parameters get a DebugValue (value tracking).
            if (passByRef) {
                makeDebugDeclare(debugLocalVariableId, paramId);
            } else {
                makeDebugValue(debugLocalVariableId, paramId);
            }
        }
    }

    // Clear debug scope stack
    if (emitNonSemanticShaderDebugInfo)
        currentDebugScopeId.pop();
}
  2326. Id Builder::makeDebugFunction([[maybe_unused]] Function* function, Id nameId, Id funcTypeId)
  2327. {
  2328. assert(function != nullptr);
  2329. assert(nameId != 0);
  2330. assert(funcTypeId != 0);
  2331. assert(getDebugType(funcTypeId) != NoType);
  2332. Id funcId = getUniqueId();
  2333. auto type = new Instruction(funcId, makeVoidType(), Op::OpExtInst);
  2334. type->reserveOperands(11);
  2335. type->addIdOperand(nonSemanticShaderDebugInfo);
  2336. type->addImmediateOperand(NonSemanticShaderDebugInfo100DebugFunction);
  2337. type->addIdOperand(nameId);
  2338. type->addIdOperand(getDebugType(funcTypeId));
  2339. type->addIdOperand(makeDebugSource(currentFileId)); // TODO: This points to file of definition instead of declaration
  2340. type->addIdOperand(makeUintConstant(currentLine)); // TODO: This points to line of definition instead of declaration
  2341. type->addIdOperand(makeUintConstant(0)); // column
  2342. type->addIdOperand(makeDebugCompilationUnit()); // scope
  2343. type->addIdOperand(nameId); // linkage name
  2344. type->addIdOperand(makeUintConstant(NonSemanticShaderDebugInfo100FlagIsPublic));
  2345. type->addIdOperand(makeUintConstant(currentLine));
  2346. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(type));
  2347. module.mapInstruction(type);
  2348. return funcId;
  2349. }
  2350. Id Builder::makeDebugLexicalBlock(uint32_t line, uint32_t column) {
  2351. assert(!currentDebugScopeId.empty());
  2352. Id lexId = getUniqueId();
  2353. auto lex = new Instruction(lexId, makeVoidType(), Op::OpExtInst);
  2354. lex->reserveOperands(6);
  2355. lex->addIdOperand(nonSemanticShaderDebugInfo);
  2356. lex->addImmediateOperand(NonSemanticShaderDebugInfo100DebugLexicalBlock);
  2357. lex->addIdOperand(makeDebugSource(currentFileId));
  2358. lex->addIdOperand(makeUintConstant(line));
  2359. lex->addIdOperand(makeUintConstant(column)); // column
  2360. lex->addIdOperand(currentDebugScopeId.top()); // scope
  2361. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(lex));
  2362. module.mapInstruction(lex);
  2363. return lexId;
  2364. }
  2365. std::string Builder::unmangleFunctionName(std::string const& name) const
  2366. {
  2367. assert(name.length() > 0);
  2368. if(name.rfind('(') != std::string::npos) {
  2369. return name.substr(0, name.rfind('('));
  2370. } else {
  2371. return name;
  2372. }
  2373. }
  2374. // Comments in header
  2375. void Builder::makeReturn(bool implicit, Id retVal)
  2376. {
  2377. if (retVal) {
  2378. Instruction* inst = new Instruction(NoResult, NoType, Op::OpReturnValue);
  2379. inst->addIdOperand(retVal);
  2380. addInstruction(std::unique_ptr<Instruction>(inst));
  2381. } else
  2382. addInstruction(std::unique_ptr<Instruction>(new Instruction(NoResult, NoType, Op::OpReturn)));
  2383. if (! implicit)
  2384. createAndSetNoPredecessorBlock("post-return");
  2385. }
  2386. // Comments in header
  2387. void Builder::enterLexicalBlock(uint32_t line, uint32_t column)
  2388. {
  2389. if (!emitNonSemanticShaderDebugInfo) {
  2390. return;
  2391. }
  2392. // Generate new lexical scope debug instruction
  2393. Id lexId = makeDebugLexicalBlock(line, column);
  2394. currentDebugScopeId.push(lexId);
  2395. dirtyScopeTracker = true;
  2396. }
  2397. // Comments in header
  2398. void Builder::leaveLexicalBlock()
  2399. {
  2400. if (!emitNonSemanticShaderDebugInfo) {
  2401. return;
  2402. }
  2403. // Pop current scope from stack and clear current scope
  2404. currentDebugScopeId.pop();
  2405. dirtyScopeTracker = true;
  2406. }
// Comments in header
// Begin code generation for 'function': records it as current, sets up its
// debug scope and DebugFunctionDefinition, and emits linkage decorations
// when the function is exported/imported.
void Builder::enterFunction(Function const* function)
{
    currentFunction = function;

    // Save and disable debugInfo for HLSL entry point function. It is a wrapper
    // function with no user code in it. (Restored later in leaveFunction().)
    restoreNonSemanticShaderDebugInfo = emitNonSemanticShaderDebugInfo;
    if (sourceLang == spv::SourceLanguage::HLSL && function == entryPointFunction) {
        emitNonSemanticShaderDebugInfo = false;
    }

    if (emitNonSemanticShaderDebugInfo) {
        // Initialize scope state: the debug function made earlier becomes the
        // current scope (popped in leaveFunction()).
        Id funcId = function->getFuncId();
        Id debugFuncId = getDebugFunction(funcId);
        currentDebugScopeId.push(debugFuncId);
        // Create DebugFunctionDefinition tying the debug function to the
        // actual OpFunction id.
        spv::Id resultId = getUniqueId();
        Instruction* defInst = new Instruction(resultId, makeVoidType(), Op::OpExtInst);
        defInst->reserveOperands(4);
        defInst->addIdOperand(nonSemanticShaderDebugInfo);
        defInst->addImmediateOperand(NonSemanticShaderDebugInfo100DebugFunctionDefinition);
        defInst->addIdOperand(debugFuncId);
        defInst->addIdOperand(funcId);
        addInstruction(std::unique_ptr<Instruction>(defInst));
    }

    // LinkageType::Max means "no linkage"; anything else needs the Linkage
    // capability plus a LinkageAttributes decoration on the function id.
    if (auto linkType = function->getLinkType(); linkType != LinkageType::Max) {
        Id funcId = function->getFuncId();
        addCapability(Capability::Linkage);
        addLinkageDecoration(funcId, function->getExportName(), linkType);
    }
}
// Comments in header
void Builder::leaveFunction()
{
    Block* block = buildPoint;
    Function& function = buildPoint->getParent();
    assert(block);

    // If our function did not contain a return, add a return void now.
    if (! block->isTerminated()) {
        if (function.getReturnType() == makeVoidType())
            makeReturn(true);
        else {
            // Non-void function falling off the end: return an undefined
            // value of the declared return type to keep the module valid.
            makeReturn(true, createUndefined(function.getReturnType()));
        }
    }

    // Clear function scope from debug scope stack
    if (emitNonSemanticShaderDebugInfo)
        currentDebugScopeId.pop();
    // Restore the emission flag saved by enterFunction() (it may have been
    // disabled for the HLSL wrapper entry point).
    emitNonSemanticShaderDebugInfo = restoreNonSemanticShaderDebugInfo;

    // Clear current function record
    currentFunction = nullptr;
}
  2459. // Comments in header
  2460. void Builder::makeStatementTerminator(spv::Op opcode, const char *name)
  2461. {
  2462. addInstruction(std::unique_ptr<Instruction>(new Instruction(opcode)));
  2463. createAndSetNoPredecessorBlock(name);
  2464. }
  2465. // Comments in header
  2466. void Builder::makeStatementTerminator(spv::Op opcode, const std::vector<Id>& operands, const char* name)
  2467. {
  2468. // It's assumed that the terminator instruction is always of void return type
  2469. // However in future if there is a need for non void return type, new helper
  2470. // methods can be created.
  2471. createNoResultOp(opcode, operands);
  2472. createAndSetNoPredecessorBlock(name);
  2473. }
  2474. void Builder::createConstVariable(Id type, const char* name, Id constant, bool isGlobal)
  2475. {
  2476. if (emitNonSemanticShaderDebugInfo) {
  2477. Id debugType = getDebugType(type);
  2478. if (isGlobal) {
  2479. createDebugGlobalVariable(debugType, name, constant);
  2480. }
  2481. else {
  2482. auto debugLocal = createDebugLocalVariable(debugType, name);
  2483. makeDebugValue(debugLocal, constant);
  2484. }
  2485. }
  2486. }
  2487. // Comments in header
  2488. Id Builder::createUntypedVariable(Decoration precision, StorageClass storageClass, const char* name, Id dataType,
  2489. Id initializer)
  2490. {
  2491. Id resultUntypedPointerType = makeUntypedPointer(storageClass);
  2492. Instruction* inst = new Instruction(getUniqueId(), resultUntypedPointerType, Op::OpUntypedVariableKHR);
  2493. inst->addImmediateOperand(storageClass);
  2494. if (dataType != NoResult) {
  2495. Id dataPointerType = makePointer(storageClass, dataType);
  2496. inst->addIdOperand(dataPointerType);
  2497. }
  2498. if (initializer != NoResult)
  2499. inst->addIdOperand(initializer);
  2500. switch (storageClass) {
  2501. case StorageClass::Function:
  2502. // Validation rules require the declaration in the entry block
  2503. buildPoint->getParent().addLocalVariable(std::unique_ptr<Instruction>(inst));
  2504. break;
  2505. default:
  2506. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
  2507. module.mapInstruction(inst);
  2508. break;
  2509. }
  2510. if (name)
  2511. addName(inst->getResultId(), name);
  2512. setPrecision(inst->getResultId(), precision);
  2513. return inst->getResultId();
  2514. }
  2515. // Comments in header
  2516. Id Builder::createVariable(Decoration precision, StorageClass storageClass, Id type, const char* name, Id initializer,
  2517. bool const compilerGenerated)
  2518. {
  2519. Id pointerType = makePointer(storageClass, type);
  2520. Instruction* inst = new Instruction(getUniqueId(), pointerType, Op::OpVariable);
  2521. inst->addImmediateOperand(storageClass);
  2522. if (initializer != NoResult)
  2523. inst->addIdOperand(initializer);
  2524. if (storageClass == StorageClass::Function) {
  2525. // Validation rules require the declaration in the entry block
  2526. buildPoint->getParent().addLocalVariable(std::unique_ptr<Instruction>(inst));
  2527. }
  2528. else {
  2529. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
  2530. module.mapInstruction(inst);
  2531. }
  2532. if (emitNonSemanticShaderDebugInfo && !compilerGenerated)
  2533. {
  2534. // For debug info, we prefer respecting how the variable is declared in source code.
  2535. // We may emulate some local variables as global variable with private storage in SPIR-V, but we still want to
  2536. // treat them as local variables in debug info.
  2537. if (storageClass == StorageClass::Function || (currentFunction && storageClass == StorageClass::Private)) {
  2538. auto const debugLocalVariableId = createDebugLocalVariable(getDebugType(type), name);
  2539. makeDebugDeclare(debugLocalVariableId, inst->getResultId());
  2540. }
  2541. else {
  2542. createDebugGlobalVariable(getDebugType(type), name, inst->getResultId());
  2543. }
  2544. }
  2545. if (name)
  2546. addName(inst->getResultId(), name);
  2547. setPrecision(inst->getResultId(), precision);
  2548. return inst->getResultId();
  2549. }
  2550. // Comments in header
  2551. Id Builder::createUndefined(Id type)
  2552. {
  2553. Instruction* inst = new Instruction(getUniqueId(), type, Op::OpUndef);
  2554. addInstruction(std::unique_ptr<Instruction>(inst));
  2555. return inst->getResultId();
  2556. }
  2557. // av/vis/nonprivate are unnecessary and illegal for some storage classes.
  2558. spv::MemoryAccessMask Builder::sanitizeMemoryAccessForStorageClass(spv::MemoryAccessMask memoryAccess, StorageClass sc)
  2559. const
  2560. {
  2561. switch (sc) {
  2562. case spv::StorageClass::Uniform:
  2563. case spv::StorageClass::Workgroup:
  2564. case spv::StorageClass::StorageBuffer:
  2565. case spv::StorageClass::PhysicalStorageBufferEXT:
  2566. break;
  2567. default:
  2568. memoryAccess = spv::MemoryAccessMask(memoryAccess &
  2569. ~(spv::MemoryAccessMask::MakePointerAvailableKHR |
  2570. spv::MemoryAccessMask::MakePointerVisibleKHR |
  2571. spv::MemoryAccessMask::NonPrivatePointerKHR));
  2572. break;
  2573. }
  2574. return memoryAccess;
  2575. }
  2576. // Comments in header
  2577. void Builder::createStore(Id rValue, Id lValue, spv::MemoryAccessMask memoryAccess, spv::Scope scope,
  2578. unsigned int alignment)
  2579. {
  2580. Instruction* store = nullptr;
  2581. if (isUntypedPointer(lValue))
  2582. store = createDescHeapLoadStoreBaseRemap(lValue, Op::OpStore);
  2583. else {
  2584. store = new Instruction(Op::OpStore);
  2585. store->reserveOperands(2);
  2586. store->addIdOperand(lValue);
  2587. }
  2588. store->addIdOperand(rValue);
  2589. memoryAccess = sanitizeMemoryAccessForStorageClass(memoryAccess, getStorageClass(lValue));
  2590. if (memoryAccess != MemoryAccessMask::MaskNone) {
  2591. store->addImmediateOperand(memoryAccess);
  2592. if (anySet(memoryAccess, spv::MemoryAccessMask::Aligned)) {
  2593. store->addImmediateOperand(alignment);
  2594. }
  2595. if (anySet(memoryAccess, spv::MemoryAccessMask::MakePointerAvailableKHR)) {
  2596. store->addIdOperand(makeUintConstant(scope));
  2597. }
  2598. }
  2599. addInstruction(std::unique_ptr<Instruction>(store));
  2600. }
  2601. // Comments in header
  2602. Id Builder::createLoad(Id lValue, spv::Decoration precision, spv::MemoryAccessMask memoryAccess,
  2603. spv::Scope scope, unsigned int alignment)
  2604. {
  2605. Instruction* load = nullptr;
  2606. if (isUntypedPointer(lValue))
  2607. load = createDescHeapLoadStoreBaseRemap(lValue, Op::OpLoad);
  2608. else {
  2609. load = new Instruction(getUniqueId(), getDerefTypeId(lValue), Op::OpLoad);
  2610. load->addIdOperand(lValue);
  2611. }
  2612. memoryAccess = sanitizeMemoryAccessForStorageClass(memoryAccess, getStorageClass(lValue));
  2613. if (memoryAccess != MemoryAccessMask::MaskNone) {
  2614. load->addImmediateOperand(memoryAccess);
  2615. if (anySet(memoryAccess, spv::MemoryAccessMask::Aligned)) {
  2616. load->addImmediateOperand(alignment);
  2617. }
  2618. if (anySet(memoryAccess, spv::MemoryAccessMask::MakePointerVisibleKHR)) {
  2619. load->addIdOperand(makeUintConstant(scope));
  2620. }
  2621. }
  2622. addInstruction(std::unique_ptr<Instruction>(load));
  2623. setPrecision(load->getResultId(), precision);
  2624. return load->getResultId();
  2625. }
  2626. Instruction* Builder::createDescHeapLoadStoreBaseRemap(Id baseId, Op op)
  2627. {
  2628. // could only be untypedAccessChain or BufferPointerEXT op.
  2629. spv::Op instOp = module.getInstruction(baseId)->getOpCode();
  2630. spv::Id baseVal = baseId;
  2631. // base type (from run time array)
  2632. spv::Id resultTy = getIdOperand(baseId, 0);
  2633. // Descriptor heap using run time array.
  2634. if (accessChain.descHeapInfo.descHeapStorageClass != StorageClass::Max)
  2635. resultTy = getIdOperand(resultTy, 0);
  2636. if (instOp == Op::OpBufferPointerEXT) {
  2637. // get base structure type from run time array of buffer structure type.
  2638. // create an extra untyped access chain for buffer pointer.
  2639. resultTy = accessChain.descHeapInfo.descHeapBaseTy;
  2640. Instruction* chain = new Instruction(getUniqueId(), getTypeId(baseId), Op::OpUntypedAccessChainKHR);
  2641. // base type.
  2642. chain->addIdOperand(resultTy);
  2643. // base
  2644. chain->addIdOperand(baseId);
  2645. // index
  2646. for (int i = 0; i < (int)accessChain.indexChain.size(); ++i) {
  2647. chain->addIdOperand(accessChain.indexChain[i]);
  2648. }
  2649. addInstruction(std::unique_ptr<Instruction>(chain));
  2650. baseVal = chain->getResultId();
  2651. clearAccessChain();
  2652. } else if (instOp != Op::OpUntypedAccessChainKHR) {
  2653. assert("Not a untyped load type");
  2654. }
  2655. Instruction* inst = nullptr;
  2656. if (op == Op::OpStore)
  2657. inst = new Instruction(Op::OpStore);
  2658. else {
  2659. inst = new Instruction(getUniqueId(), resultTy, Op::OpLoad);
  2660. accessChain.descHeapInfo.descHeapInstId.push_back(inst);
  2661. }
  2662. inst->addIdOperand(baseVal);
  2663. return inst;
  2664. }
// Walk the type of 'id' along 'indexChain' (starting at chain position 'idx')
// looking for a struct member decorated BuiltIn ResourceHeapEXT or
// SamplerHeapEXT. Returns the chain position at which the heap member is
// selected, or 0 if none is found. On success, '*bt' (if non-null) receives
// the matching built-in and '*firstArrIndex' (if non-null) the position of
// the first array index encountered on the way down.
uint32_t Builder::isStructureHeapMember(Id id, std::vector<Id> indexChain,
    unsigned int idx, spv::BuiltIn* bt, uint32_t* firstArrIndex)
{
    unsigned currentIdx = idx;
    // Process types, only array types could contain no constant id operands.
    Id baseId = id;
    if (baseId == NoType)
        return 0;
    if (isPointerType(baseId))
        baseId = getContainedTypeId(baseId);
    auto baseInst = module.getInstruction(baseId);
    if (baseInst->getOpCode() == spv::Op::OpTypeArray ||
        baseInst->getOpCode() == spv::Op::OpTypeRuntimeArray) {
        // An array level consumes one chain entry; remember where it was.
        if (firstArrIndex)
            *firstArrIndex = currentIdx;
        baseId = getContainedTypeId(baseId);
        baseInst = module.getInstruction(baseId);
        currentIdx++;
    }
    if (currentIdx >= indexChain.size())
        return 0;
    // Process index op. Only a compile-time OpConstant member index can match.
    auto indexInst = module.getInstruction(indexChain[currentIdx]);
    if (indexInst->getOpCode() != spv::Op::OpConstant)
        return 0;
    auto index = indexInst->getImmediateOperand(0);
    // Scan the module's member decorations for BuiltIn Resource/SamplerHeapEXT
    // on member 'index' of the current struct type.
    for (auto dec = decorations.begin(); dec != decorations.end(); dec++) {
        if (dec->get()->getOpCode() == spv::Op::OpMemberDecorate && dec->get()->getIdOperand(0) == baseId &&
            dec->get()->getImmediateOperand(1) == index &&
            dec->get()->getImmediateOperand(2) == spv::Decoration::BuiltIn &&
            (dec->get()->getImmediateOperand(3) == (unsigned)spv::BuiltIn::ResourceHeapEXT ||
             dec->get()->getImmediateOperand(3) == (unsigned)spv::BuiltIn::SamplerHeapEXT)) {
            if (bt)
                *bt = (spv::BuiltIn)dec->get()->getImmediateOperand(3);
            return currentIdx;
        }
    }
    // New base: recurse into the selected struct member with the remaining chain.
    if (baseInst->getOpCode() == spv::Op::OpTypeStruct) {
        // NOTE(review): the end-of-chain test compares 'idx' (the caller's
        // starting position), not 'currentIdx' (which may have advanced past
        // an array level above) — confirm this is intentional.
        if (!baseInst->isIdOperand(index) || idx == indexChain.size() - 1)
            return 0;
        return isStructureHeapMember(baseInst->getIdOperand(index), indexChain, currentIdx + 1, bt, firstArrIndex);
    }
    return 0;
}
// Comments in header
// Build the untyped access chain for a descriptor-heap access, using the
// state accumulated in 'accessChain.descHeapInfo'. For buffer/uniform heaps
// this also emits an OpBufferPointerEXT and rewrites 'accessChain.indexChain'
// in place so the remaining indexes apply to the buffer's contents; the
// returned id is then the base for a second access chain built at load time.
Id Builder::createDescHeapAccessChain()
{
    uint32_t rsrcOffsetIdx = accessChain.descHeapInfo.structRsrcTyOffsetCount;
    // When the heap member was reached through a struct, retarget the base.
    if (rsrcOffsetIdx != 0)
        accessChain.base = accessChain.descHeapInfo.structRemappedBase;
    Id base = accessChain.base;
    Id untypedResultTy = accessChain.descHeapInfo.descHeapBaseTy;
    uint32_t explicitArrayStride = accessChain.descHeapInfo.descHeapBaseArrayStride;
    std::vector<Id>& offsets = accessChain.indexChain;
    uint32_t firstArrIndex = accessChain.descHeapInfo.structRsrcTyFirstArrIndex;
    // both typeBufferEXT and UntypedPointer only contains storage class info.
    StorageClass storageClass = (StorageClass)accessChain.descHeapInfo.descHeapStorageClass;
    Id resultTy = makeUntypedPointer(storageClass == spv::StorageClass::StorageBuffer ? spv::StorageClass::StorageBuffer
        : spv::StorageClass::Uniform);
    // Make the untyped access chain instruction
    Instruction* chain = new Instruction(getUniqueId(), makeUntypedPointer(getStorageClass(base)), Op::OpUntypedAccessChainKHR);
    if (storageClass == spv::StorageClass::Uniform || storageClass == spv::StorageClass::StorageBuffer) {
        // For buffer and uniform heap, split first index as heap array index
        // Insert BufferPointer op and construct another access chain with following indexes.
        Id bufferTy = makeUntypedPointer(storageClass, true);
        // Array stride: explicit when provided, otherwise sizeof the buffer type.
        Id strideId = NoResult;
        if (explicitArrayStride == 0) {
            strideId = createConstantSizeOfEXT(bufferTy);
        } else {
            strideId = makeUintConstant(explicitArrayStride);
        }
        Id runtimeArrTy = makeRuntimeArray(bufferTy);
        addDecorationId(runtimeArrTy, spv::Decoration::ArrayStrideIdEXT, strideId);
        chain->addIdOperand(runtimeArrTy);
        chain->addIdOperand(base);
        // We would only re-target current member resource directly to resource/sampler heap base.
        // So the previous access chain index towards final resource type is not needed?
        // In current draft, only keep the first 'array index' into last access chain index.
        // As those resource can't be declared as an array, in current first draft, array index will
        // be the second index. This will be refined later.
        chain->addIdOperand(offsets[firstArrIndex]);
        // Shift the consumed indexes out of 'offsets', leaving only those that
        // apply inside the buffer, then drop the now-unused tail entries.
        if (rsrcOffsetIdx != 0) {
            for (uint32_t i = 0; i < rsrcOffsetIdx + 1; i++) {
                if (rsrcOffsetIdx + i + 1 < offsets.size())
                    offsets[i] = offsets[i + rsrcOffsetIdx + 1];
            }
        } else {
            for (uint32_t i = 0; i < offsets.size() - 1; i++) {
                offsets[i] = offsets[i + 1];
            }
        }
        for (uint32_t i = 0; i < rsrcOffsetIdx + 1; i++)
            offsets.pop_back();
        addInstruction(std::unique_ptr<Instruction>(chain));
        // Create OpBufferPointer for loading target buffer descriptor.
        Instruction* bufferUntypedDataPtr = new Instruction(getUniqueId(), resultTy, Op::OpBufferPointerEXT);
        bufferUntypedDataPtr->addIdOperand(chain->getResultId());
        addInstruction(std::unique_ptr<Instruction>(bufferUntypedDataPtr));
        // Final/Second untyped access chain loading will be created during loading, current results only
        // refer to the loading 'base'.
        return bufferUntypedDataPtr->getResultId();
    } else {
        // image/sampler heap: index straight into a runtime array of the
        // resource type; no BufferPointer indirection is needed.
        Id strideId = NoResult;
        if (explicitArrayStride == 0) {
            strideId = createConstantSizeOfEXT(untypedResultTy);
        } else {
            strideId = makeUintConstant(explicitArrayStride);
        }
        Id runtimeArrTy = makeRuntimeArray(untypedResultTy);
        addDecorationId(runtimeArrTy, spv::Decoration::ArrayStrideIdEXT, strideId);
        chain->addIdOperand(runtimeArrTy);
        chain->addIdOperand(base);
        for (int i = 0; i < (int)offsets.size(); ++i)
            chain->addIdOperand(offsets[i]);
        addInstruction(std::unique_ptr<Instruction>(chain));
        return chain->getResultId();
    }
}
  2785. // Comments in header
  2786. Id Builder::createAccessChain(StorageClass storageClass, Id base, const std::vector<Id>& offsets)
  2787. {
  2788. // Figure out the final resulting type.
  2789. Id typeId = getResultingAccessChainType();
  2790. typeId = makePointer(storageClass, typeId);
  2791. // Make the instruction
  2792. Instruction* chain = new Instruction(getUniqueId(), typeId, Op::OpAccessChain);
  2793. chain->reserveOperands(offsets.size() + 1);
  2794. chain->addIdOperand(base);
  2795. for (int i = 0; i < (int)offsets.size(); ++i)
  2796. chain->addIdOperand(offsets[i]);
  2797. addInstruction(std::unique_ptr<Instruction>(chain));
  2798. return chain->getResultId();
  2799. }
  2800. Id Builder::createArrayLength(Id base, unsigned int member, unsigned int bits)
  2801. {
  2802. spv::Id intType = makeUintType(bits);
  2803. Instruction* length = new Instruction(getUniqueId(), intType, Op::OpArrayLength);
  2804. length->reserveOperands(2);
  2805. length->addIdOperand(base);
  2806. length->addImmediateOperand(member);
  2807. addInstruction(std::unique_ptr<Instruction>(length));
  2808. return length->getResultId();
  2809. }
  2810. Id Builder::createCooperativeMatrixLengthKHR(Id type)
  2811. {
  2812. spv::Id intType = makeUintType(32);
  2813. // Generate code for spec constants if in spec constant operation
  2814. // generation mode.
  2815. if (generatingOpCodeForSpecConst) {
  2816. return createSpecConstantOp(Op::OpCooperativeMatrixLengthKHR, intType, std::vector<Id>(1, type), std::vector<Id>());
  2817. }
  2818. Instruction* length = new Instruction(getUniqueId(), intType, Op::OpCooperativeMatrixLengthKHR);
  2819. length->addIdOperand(type);
  2820. addInstruction(std::unique_ptr<Instruction>(length));
  2821. return length->getResultId();
  2822. }
  2823. Id Builder::createCooperativeMatrixLengthNV(Id type)
  2824. {
  2825. spv::Id intType = makeUintType(32);
  2826. // Generate code for spec constants if in spec constant operation
  2827. // generation mode.
  2828. if (generatingOpCodeForSpecConst) {
  2829. return createSpecConstantOp(Op::OpCooperativeMatrixLengthNV, intType, std::vector<Id>(1, type), std::vector<Id>());
  2830. }
  2831. Instruction* length = new Instruction(getUniqueId(), intType, Op::OpCooperativeMatrixLengthNV);
  2832. length->addIdOperand(type);
  2833. addInstruction(std::unique_ptr<Instruction>(length));
  2834. return length->getResultId();
  2835. }
  2836. Id Builder::createCompositeExtract(Id composite, Id typeId, unsigned index)
  2837. {
  2838. // Generate code for spec constants if in spec constant operation
  2839. // generation mode.
  2840. if (generatingOpCodeForSpecConst) {
  2841. return createSpecConstantOp(Op::OpCompositeExtract, typeId, std::vector<Id>(1, composite),
  2842. std::vector<Id>(1, index));
  2843. }
  2844. Instruction* extract = new Instruction(getUniqueId(), typeId, Op::OpCompositeExtract);
  2845. extract->reserveOperands(2);
  2846. extract->addIdOperand(composite);
  2847. extract->addImmediateOperand(index);
  2848. addInstruction(std::unique_ptr<Instruction>(extract));
  2849. return extract->getResultId();
  2850. }
  2851. Id Builder::createCompositeExtract(Id composite, Id typeId, const std::vector<unsigned>& indexes)
  2852. {
  2853. // Generate code for spec constants if in spec constant operation
  2854. // generation mode.
  2855. if (generatingOpCodeForSpecConst) {
  2856. return createSpecConstantOp(Op::OpCompositeExtract, typeId, std::vector<Id>(1, composite), indexes);
  2857. }
  2858. Instruction* extract = new Instruction(getUniqueId(), typeId, Op::OpCompositeExtract);
  2859. extract->reserveOperands(indexes.size() + 1);
  2860. extract->addIdOperand(composite);
  2861. for (int i = 0; i < (int)indexes.size(); ++i)
  2862. extract->addImmediateOperand(indexes[i]);
  2863. addInstruction(std::unique_ptr<Instruction>(extract));
  2864. return extract->getResultId();
  2865. }
  2866. Id Builder::createCompositeInsert(Id object, Id composite, Id typeId, unsigned index)
  2867. {
  2868. Instruction* insert = new Instruction(getUniqueId(), typeId, Op::OpCompositeInsert);
  2869. insert->reserveOperands(3);
  2870. insert->addIdOperand(object);
  2871. insert->addIdOperand(composite);
  2872. insert->addImmediateOperand(index);
  2873. addInstruction(std::unique_ptr<Instruction>(insert));
  2874. return insert->getResultId();
  2875. }
  2876. Id Builder::createCompositeInsert(Id object, Id composite, Id typeId, const std::vector<unsigned>& indexes)
  2877. {
  2878. Instruction* insert = new Instruction(getUniqueId(), typeId, Op::OpCompositeInsert);
  2879. insert->reserveOperands(indexes.size() + 2);
  2880. insert->addIdOperand(object);
  2881. insert->addIdOperand(composite);
  2882. for (int i = 0; i < (int)indexes.size(); ++i)
  2883. insert->addImmediateOperand(indexes[i]);
  2884. addInstruction(std::unique_ptr<Instruction>(insert));
  2885. return insert->getResultId();
  2886. }
  2887. Id Builder::createVectorExtractDynamic(Id vector, Id typeId, Id componentIndex)
  2888. {
  2889. Instruction* extract = new Instruction(getUniqueId(), typeId, Op::OpVectorExtractDynamic);
  2890. extract->reserveOperands(2);
  2891. extract->addIdOperand(vector);
  2892. extract->addIdOperand(componentIndex);
  2893. addInstruction(std::unique_ptr<Instruction>(extract));
  2894. return extract->getResultId();
  2895. }
  2896. Id Builder::createVectorInsertDynamic(Id vector, Id typeId, Id component, Id componentIndex)
  2897. {
  2898. Instruction* insert = new Instruction(getUniqueId(), typeId, Op::OpVectorInsertDynamic);
  2899. insert->reserveOperands(3);
  2900. insert->addIdOperand(vector);
  2901. insert->addIdOperand(component);
  2902. insert->addIdOperand(componentIndex);
  2903. addInstruction(std::unique_ptr<Instruction>(insert));
  2904. return insert->getResultId();
  2905. }
  2906. // An opcode that has no operands, no result id, and no type
  2907. void Builder::createNoResultOp(Op opCode)
  2908. {
  2909. Instruction* op = new Instruction(opCode);
  2910. addInstruction(std::unique_ptr<Instruction>(op));
  2911. }
  2912. // An opcode that has one id operand, no result id, and no type
  2913. void Builder::createNoResultOp(Op opCode, Id operand)
  2914. {
  2915. Instruction* op = new Instruction(opCode);
  2916. op->addIdOperand(operand);
  2917. addInstruction(std::unique_ptr<Instruction>(op));
  2918. }
  2919. // An opcode that has one or more operands, no result id, and no type
  2920. void Builder::createNoResultOp(Op opCode, const std::vector<Id>& operands)
  2921. {
  2922. Instruction* op = new Instruction(opCode);
  2923. op->reserveOperands(operands.size());
  2924. for (auto id : operands) {
  2925. op->addIdOperand(id);
  2926. }
  2927. addInstruction(std::unique_ptr<Instruction>(op));
  2928. }
  2929. // An opcode that has multiple operands, no result id, and no type
  2930. void Builder::createNoResultOp(Op opCode, const std::vector<IdImmediate>& operands)
  2931. {
  2932. Instruction* op = new Instruction(opCode);
  2933. op->reserveOperands(operands.size());
  2934. for (auto it = operands.cbegin(); it != operands.cend(); ++it) {
  2935. if (it->isId)
  2936. op->addIdOperand(it->word);
  2937. else
  2938. op->addImmediateOperand(it->word);
  2939. }
  2940. addInstruction(std::unique_ptr<Instruction>(op));
  2941. }
  2942. void Builder::createControlBarrier(Scope execution, Scope memory, MemorySemanticsMask semantics)
  2943. {
  2944. Instruction* op = new Instruction(Op::OpControlBarrier);
  2945. op->reserveOperands(3);
  2946. op->addIdOperand(makeUintConstant(execution));
  2947. op->addIdOperand(makeUintConstant(memory));
  2948. op->addIdOperand(makeUintConstant(semantics));
  2949. addInstruction(std::unique_ptr<Instruction>(op));
  2950. }
  2951. void Builder::createMemoryBarrier(Scope executionScope, MemorySemanticsMask memorySemantics)
  2952. {
  2953. Instruction* op = new Instruction(Op::OpMemoryBarrier);
  2954. op->reserveOperands(2);
  2955. op->addIdOperand(makeUintConstant((unsigned)executionScope));
  2956. op->addIdOperand(makeUintConstant((unsigned)memorySemantics));
  2957. addInstruction(std::unique_ptr<Instruction>(op));
  2958. }
  2959. // An opcode that has one operands, a result id, and a type
  2960. Id Builder::createUnaryOp(Op opCode, Id typeId, Id operand)
  2961. {
  2962. // Generate code for spec constants if in spec constant operation
  2963. // generation mode.
  2964. if (generatingOpCodeForSpecConst) {
  2965. return createSpecConstantOp(opCode, typeId, std::vector<Id>(1, operand), std::vector<Id>());
  2966. }
  2967. Instruction* op = new Instruction(getUniqueId(), typeId, opCode);
  2968. op->addIdOperand(operand);
  2969. addInstruction(std::unique_ptr<Instruction>(op));
  2970. return op->getResultId();
  2971. }
  2972. Id Builder::createBinOp(Op opCode, Id typeId, Id left, Id right)
  2973. {
  2974. // Generate code for spec constants if in spec constant operation
  2975. // generation mode.
  2976. if (generatingOpCodeForSpecConst) {
  2977. std::vector<Id> operands(2);
  2978. operands[0] = left; operands[1] = right;
  2979. return createSpecConstantOp(opCode, typeId, operands, std::vector<Id>());
  2980. }
  2981. Instruction* op = new Instruction(getUniqueId(), typeId, opCode);
  2982. op->reserveOperands(2);
  2983. op->addIdOperand(left);
  2984. op->addIdOperand(right);
  2985. addInstruction(std::unique_ptr<Instruction>(op));
  2986. return op->getResultId();
  2987. }
  2988. Id Builder::createTriOp(Op opCode, Id typeId, Id op1, Id op2, Id op3)
  2989. {
  2990. // Generate code for spec constants if in spec constant operation
  2991. // generation mode.
  2992. if (generatingOpCodeForSpecConst) {
  2993. std::vector<Id> operands(3);
  2994. operands[0] = op1;
  2995. operands[1] = op2;
  2996. operands[2] = op3;
  2997. return createSpecConstantOp(
  2998. opCode, typeId, operands, std::vector<Id>());
  2999. }
  3000. Instruction* op = new Instruction(getUniqueId(), typeId, opCode);
  3001. op->reserveOperands(3);
  3002. op->addIdOperand(op1);
  3003. op->addIdOperand(op2);
  3004. op->addIdOperand(op3);
  3005. addInstruction(std::unique_ptr<Instruction>(op));
  3006. return op->getResultId();
  3007. }
  3008. Id Builder::createOp(Op opCode, Id typeId, const std::vector<Id>& operands)
  3009. {
  3010. Instruction* op = new Instruction(getUniqueId(), typeId, opCode);
  3011. op->reserveOperands(operands.size());
  3012. for (auto id : operands)
  3013. op->addIdOperand(id);
  3014. addInstruction(std::unique_ptr<Instruction>(op));
  3015. return op->getResultId();
  3016. }
  3017. Id Builder::createOp(Op opCode, Id typeId, const std::vector<IdImmediate>& operands)
  3018. {
  3019. Instruction* op = new Instruction(getUniqueId(), typeId, opCode);
  3020. op->reserveOperands(operands.size());
  3021. for (auto it = operands.cbegin(); it != operands.cend(); ++it) {
  3022. if (it->isId)
  3023. op->addIdOperand(it->word);
  3024. else
  3025. op->addImmediateOperand(it->word);
  3026. }
  3027. addInstruction(std::unique_ptr<Instruction>(op));
  3028. return op->getResultId();
  3029. }
  3030. Id Builder::createSpecConstantOp(Op opCode, Id typeId, const std::vector<Id>& operands,
  3031. const std::vector<unsigned>& literals)
  3032. {
  3033. Instruction* op = new Instruction(getUniqueId(), typeId, Op::OpSpecConstantOp);
  3034. op->reserveOperands(operands.size() + literals.size() + 1);
  3035. op->addImmediateOperand((unsigned) opCode);
  3036. for (auto it = operands.cbegin(); it != operands.cend(); ++it)
  3037. op->addIdOperand(*it);
  3038. for (auto it = literals.cbegin(); it != literals.cend(); ++it)
  3039. op->addImmediateOperand(*it);
  3040. module.mapInstruction(op);
  3041. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(op));
  3042. // OpSpecConstantOp's using 8 or 16 bit types require the associated capability
  3043. if (containsType(typeId, Op::OpTypeInt, 8))
  3044. addCapability(Capability::Int8);
  3045. if (containsType(typeId, Op::OpTypeInt, 16))
  3046. addCapability(Capability::Int16);
  3047. if (containsType(typeId, Op::OpTypeFloat, 16))
  3048. addCapability(Capability::Float16);
  3049. return op->getResultId();
  3050. }
  3051. Id Builder::createFunctionCall(spv::Function* function, const std::vector<spv::Id>& args)
  3052. {
  3053. Instruction* op = new Instruction(getUniqueId(), function->getReturnType(), Op::OpFunctionCall);
  3054. op->reserveOperands(args.size() + 1);
  3055. op->addIdOperand(function->getId());
  3056. for (int a = 0; a < (int)args.size(); ++a)
  3057. op->addIdOperand(args[a]);
  3058. addInstruction(std::unique_ptr<Instruction>(op));
  3059. return op->getResultId();
  3060. }
  3061. // Comments in header
  3062. Id Builder::createRvalueSwizzle(Decoration precision, Id typeId, Id source, const std::vector<unsigned>& channels)
  3063. {
  3064. if (channels.size() == 1)
  3065. return setPrecision(createCompositeExtract(source, typeId, channels.front()), precision);
  3066. if (generatingOpCodeForSpecConst) {
  3067. std::vector<Id> operands(2);
  3068. operands[0] = operands[1] = source;
  3069. return setPrecision(createSpecConstantOp(Op::OpVectorShuffle, typeId, operands, channels), precision);
  3070. }
  3071. Instruction* swizzle = new Instruction(getUniqueId(), typeId, Op::OpVectorShuffle);
  3072. assert(isVector(source));
  3073. swizzle->reserveOperands(channels.size() + 2);
  3074. swizzle->addIdOperand(source);
  3075. swizzle->addIdOperand(source);
  3076. for (int i = 0; i < (int)channels.size(); ++i)
  3077. swizzle->addImmediateOperand(channels[i]);
  3078. addInstruction(std::unique_ptr<Instruction>(swizzle));
  3079. return setPrecision(swizzle->getResultId(), precision);
  3080. }
  3081. // Comments in header
  3082. Id Builder::createLvalueSwizzle(Id typeId, Id target, Id source, const std::vector<unsigned>& channels)
  3083. {
  3084. if (channels.size() == 1 && getNumComponents(source) == 1)
  3085. return createCompositeInsert(source, target, typeId, channels.front());
  3086. Instruction* swizzle = new Instruction(getUniqueId(), typeId, Op::OpVectorShuffle);
  3087. assert(isVector(target));
  3088. swizzle->reserveOperands(2);
  3089. swizzle->addIdOperand(target);
  3090. assert(getNumComponents(source) == channels.size());
  3091. assert(isVector(source));
  3092. swizzle->addIdOperand(source);
  3093. // Set up an identity shuffle from the base value to the result value
  3094. unsigned int components[4];
  3095. int numTargetComponents = getNumComponents(target);
  3096. for (int i = 0; i < numTargetComponents; ++i)
  3097. components[i] = i;
  3098. // Punch in the l-value swizzle
  3099. for (int i = 0; i < (int)channels.size(); ++i)
  3100. components[channels[i]] = numTargetComponents + i;
  3101. // finish the instruction with these components selectors
  3102. swizzle->reserveOperands(numTargetComponents);
  3103. for (int i = 0; i < numTargetComponents; ++i)
  3104. swizzle->addImmediateOperand(components[i]);
  3105. addInstruction(std::unique_ptr<Instruction>(swizzle));
  3106. return swizzle->getResultId();
  3107. }
  3108. // Comments in header
  3109. void Builder::promoteScalar(Decoration precision, Id& left, Id& right)
  3110. {
  3111. // choose direction of promotion (+1 for left to right, -1 for right to left)
  3112. int direction = !isScalar(right) - !isScalar(left);
  3113. auto const &makeVec = [&](Id component, Id other) {
  3114. if (isCooperativeVector(other)) {
  3115. return makeCooperativeVectorTypeNV(getTypeId(component), getCooperativeVectorNumComponents(getTypeId(other)));
  3116. } else {
  3117. return makeVectorType(getTypeId(component), getNumComponents(other));
  3118. }
  3119. };
  3120. if (direction > 0)
  3121. left = smearScalar(precision, left, makeVec(left, right));
  3122. else if (direction < 0)
  3123. right = smearScalar(precision, right, makeVec(right, left));
  3124. return;
  3125. }
  3126. // Comments in header
  3127. Id Builder::smearScalar(Decoration precision, Id scalar, Id vectorType)
  3128. {
  3129. assert(getNumComponents(scalar) == 1);
  3130. assert(getTypeId(scalar) == getScalarTypeId(vectorType));
  3131. int numComponents = getNumTypeComponents(vectorType);
  3132. if (numComponents == 1 && !isCooperativeVectorType(vectorType) && !isVectorType(vectorType))
  3133. return scalar;
  3134. Instruction* smear = nullptr;
  3135. if (generatingOpCodeForSpecConst) {
  3136. auto members = std::vector<spv::Id>(numComponents, scalar);
  3137. // Sometime even in spec-constant-op mode, the temporary vector created by
  3138. // promoting a scalar might not be a spec constant. This should depend on
  3139. // the scalar.
  3140. // e.g.:
  3141. // const vec2 spec_const_result = a_spec_const_vec2 + a_front_end_const_scalar;
  3142. // In such cases, the temporary vector created from a_front_end_const_scalar
  3143. // is not a spec constant vector, even though the binary operation node is marked
  3144. // as 'specConstant' and we are in spec-constant-op mode.
  3145. auto result_id = makeCompositeConstant(vectorType, members, isSpecConstant(scalar));
  3146. smear = module.getInstruction(result_id);
  3147. } else {
  3148. bool replicate = (useReplicatedComposites || isCooperativeVectorType(vectorType)) && (numComponents > 0);
  3149. if (replicate) {
  3150. numComponents = 1;
  3151. addCapability(spv::Capability::ReplicatedCompositesEXT);
  3152. addExtension(spv::E_SPV_EXT_replicated_composites);
  3153. }
  3154. Op opcode = replicate ? Op::OpCompositeConstructReplicateEXT : Op::OpCompositeConstruct;
  3155. smear = new Instruction(getUniqueId(), vectorType, opcode);
  3156. smear->reserveOperands(numComponents);
  3157. for (int c = 0; c < numComponents; ++c)
  3158. smear->addIdOperand(scalar);
  3159. addInstruction(std::unique_ptr<Instruction>(smear));
  3160. }
  3161. return setPrecision(smear->getResultId(), precision);
  3162. }
  3163. // Comments in header
  3164. Id Builder::createBuiltinCall(Id resultType, Id builtins, int entryPoint, const std::vector<Id>& args)
  3165. {
  3166. Instruction* inst = new Instruction(getUniqueId(), resultType, Op::OpExtInst);
  3167. inst->reserveOperands(args.size() + 2);
  3168. inst->addIdOperand(builtins);
  3169. inst->addImmediateOperand(entryPoint);
  3170. for (int arg = 0; arg < (int)args.size(); ++arg)
  3171. inst->addIdOperand(args[arg]);
  3172. addInstruction(std::unique_ptr<Instruction>(inst));
  3173. return inst->getResultId();
  3174. }
// Accept all parameters needed to create a texture instruction.
// Create the correct instruction based on the inputs, and make the call.
Id Builder::createTextureCall(Decoration precision, Id resultType, bool sparse, bool fetch, bool proj, bool gather,
    bool noImplicitLod, const TextureParameters& parameters, ImageOperandsMask signExtensionMask)
{
    std::vector<Id> texArgs;

    //
    // Set up the fixed arguments
    //
    bool explicitLod = false;
    texArgs.push_back(parameters.sampler);
    texArgs.push_back(parameters.coords);
    if (parameters.Dref != NoResult)
        texArgs.push_back(parameters.Dref);
    if (parameters.component != NoResult)
        texArgs.push_back(parameters.component);
    // granularity/coarse are only both set for OpImageSampleFootprintNV (see opcode selection below)
    if (parameters.granularity != NoResult)
        texArgs.push_back(parameters.granularity);
    if (parameters.coarse != NoResult)
        texArgs.push_back(parameters.coarse);

    //
    // Set up the optional arguments
    //
    size_t optArgNum = texArgs.size(); // the position of the mask for the optional arguments, if any.
    ImageOperandsMask mask = ImageOperandsMask::MaskNone; // the mask operand
    if (parameters.bias) {
        mask = (ImageOperandsMask)(mask | ImageOperandsMask::Bias);
        texArgs.push_back(parameters.bias);
    }
    if (parameters.lod) {
        mask = (ImageOperandsMask)(mask | ImageOperandsMask::Lod);
        texArgs.push_back(parameters.lod);
        explicitLod = true;
    } else if (parameters.gradX) {
        // gradients come as a gradX/gradY pair and also select explicit-LOD opcodes
        mask = (ImageOperandsMask)(mask | ImageOperandsMask::Grad);
        texArgs.push_back(parameters.gradX);
        texArgs.push_back(parameters.gradY);
        explicitLod = true;
    } else if (noImplicitLod && ! fetch && ! gather) {
        // have to explicitly use lod of 0 if not allowed to have them be implicit, and
        // we would otherwise be about to issue an implicit instruction
        mask = (ImageOperandsMask)(mask | ImageOperandsMask::Lod);
        texArgs.push_back(makeFloatConstant(0.0));
        explicitLod = true;
    }
    if (parameters.offset) {
        if (isConstant(parameters.offset))
            mask = (ImageOperandsMask)(mask | ImageOperandsMask::ConstOffset);
        else {
            // a non-constant offset requires the extended-gather capability
            addCapability(Capability::ImageGatherExtended);
            mask = (ImageOperandsMask)(mask | ImageOperandsMask::Offset);
        }
        texArgs.push_back(parameters.offset);
    }
    if (parameters.offsets) {
        if (!isConstant(parameters.offsets) && sourceLang == spv::SourceLanguage::GLSL) {
            mask = (ImageOperandsMask)(mask | ImageOperandsMask::Offsets);
        } else {
            addCapability(Capability::ImageGatherExtended);
            mask = (ImageOperandsMask)(mask | ImageOperandsMask::ConstOffsets);
        }
        texArgs.push_back(parameters.offsets);
    }
    if (parameters.sample) {
        mask = (ImageOperandsMask)(mask | ImageOperandsMask::Sample);
        texArgs.push_back(parameters.sample);
    }
    if (parameters.lodClamp) {
        // capability if this bit is used
        addCapability(Capability::MinLod);
        mask = (ImageOperandsMask)(mask | ImageOperandsMask::MinLod);
        texArgs.push_back(parameters.lodClamp);
    }
    // The following bits contribute to the mask only; they add no operand ids.
    if (parameters.nonprivate) {
        mask = mask | ImageOperandsMask::NonPrivateTexelKHR;
    }
    if (parameters.volatil) {
        mask = mask | ImageOperandsMask::VolatileTexelKHR;
    }
    if (parameters.nontemporal) {
        mask = mask | ImageOperandsMask::Nontemporal;
    }
    mask = mask | signExtensionMask;
    // insert the operand for the mask, if any bits were set.
    if (mask != ImageOperandsMask::MaskNone)
        texArgs.insert(texArgs.begin() + optArgNum, (Id)mask);

    //
    // Set up the instruction
    //
    // Pick the opcode from the (fetch, footprint, gather, explicit/implicit LOD)
    // family, then refine by Dref / proj / sparse.
    Op opCode = Op::OpNop;  // All paths below need to set this
    if (fetch) {
        if (sparse)
            opCode = Op::OpImageSparseFetch;
        else
            opCode = Op::OpImageFetch;
    } else if (parameters.granularity && parameters.coarse) {
        opCode = Op::OpImageSampleFootprintNV;
    } else if (gather) {
        if (parameters.Dref)
            if (sparse)
                opCode = Op::OpImageSparseDrefGather;
            else
                opCode = Op::OpImageDrefGather;
        else
            if (sparse)
                opCode = Op::OpImageSparseGather;
            else
                opCode = Op::OpImageGather;
    } else if (explicitLod) {
        if (parameters.Dref) {
            if (proj)
                if (sparse)
                    opCode = Op::OpImageSparseSampleProjDrefExplicitLod;
                else
                    opCode = Op::OpImageSampleProjDrefExplicitLod;
            else
                if (sparse)
                    opCode = Op::OpImageSparseSampleDrefExplicitLod;
                else
                    opCode = Op::OpImageSampleDrefExplicitLod;
        } else {
            if (proj)
                if (sparse)
                    opCode = Op::OpImageSparseSampleProjExplicitLod;
                else
                    opCode = Op::OpImageSampleProjExplicitLod;
            else
                if (sparse)
                    opCode = Op::OpImageSparseSampleExplicitLod;
                else
                    opCode = Op::OpImageSampleExplicitLod;
        }
    } else {
        if (parameters.Dref) {
            if (proj)
                if (sparse)
                    opCode = Op::OpImageSparseSampleProjDrefImplicitLod;
                else
                    opCode = Op::OpImageSampleProjDrefImplicitLod;
            else
                if (sparse)
                    opCode = Op::OpImageSparseSampleDrefImplicitLod;
                else
                    opCode = Op::OpImageSampleDrefImplicitLod;
        } else {
            if (proj)
                if (sparse)
                    opCode = Op::OpImageSparseSampleProjImplicitLod;
                else
                    opCode = Op::OpImageSampleProjImplicitLod;
            else
                if (sparse)
                    opCode = Op::OpImageSparseSampleImplicitLod;
                else
                    opCode = Op::OpImageSampleImplicitLod;
        }
    }

    // See if the result type is expecting a smeared result.
    // This happens when a legacy shadow*() call is made, which
    // gets a vec4 back instead of a float.
    Id smearedType = resultType;
    if (! isScalarType(resultType)) {
        switch (opCode) {
        case Op::OpImageSampleDrefImplicitLod:
        case Op::OpImageSampleDrefExplicitLod:
        case Op::OpImageSampleProjDrefImplicitLod:
        case Op::OpImageSampleProjDrefExplicitLod:
            // Dref sampling produces a scalar; remember the vector type in
            // smearedType so the result can be smeared back after the call.
            resultType = getScalarTypeId(resultType);
            break;
        default:
            break;
        }
    }

    Id typeId0 = 0;
    Id typeId1 = 0;
    if (sparse) {
        // sparse variants return a struct of { residency info, texel };
        // the texel is stored through parameters.texelOut after the call.
        typeId0 = resultType;
        typeId1 = getDerefTypeId(parameters.texelOut);
        resultType = makeStructResultType(typeId0, typeId1);
    }

    // Build the SPIR-V instruction
    Instruction* textureInst = new Instruction(getUniqueId(), resultType, opCode);
    // operand layout: ids before the mask literal, the mask literal itself
    // (if present at optArgNum), then the ids that follow it
    textureInst->reserveOperands(optArgNum + (texArgs.size() - (optArgNum + 1)));
    for (size_t op = 0; op < optArgNum; ++op)
        textureInst->addIdOperand(texArgs[op]);
    if (optArgNum < texArgs.size())
        textureInst->addImmediateOperand(texArgs[optArgNum]);
    for (size_t op = optArgNum + 1; op < texArgs.size(); ++op)
        textureInst->addIdOperand(texArgs[op]);
    setPrecision(textureInst->getResultId(), precision);
    addInstruction(std::unique_ptr<Instruction>(textureInst));

    Id resultId = textureInst->getResultId();

    if (sparse) {
        // set capability
        addCapability(Capability::SparseResidency);

        // Decode the return type that was a special structure
        createStore(createCompositeExtract(resultId, typeId1, 1), parameters.texelOut);
        resultId = createCompositeExtract(resultId, typeId0, 0);
        setPrecision(resultId, precision);
    } else {
        // When a smear is needed, do it, as per what was computed
        // above when resultType was changed to a scalar type.
        if (resultType != smearedType)
            resultId = smearScalar(precision, resultId, smearedType);
    }

    return resultId;
}
// Comments in header
Id Builder::createTextureQueryCall(Op opCode, const TextureParameters& parameters, bool isUnsignedResult)
{
    // Figure out the result type
    Id resultType = 0;
    switch (opCode) {
    case Op::OpImageQuerySize:
    case Op::OpImageQuerySizeLod:
    {
        // Size queries return one integer component per image dimension...
        int numComponents = 0;
        switch (getTypeDimensionality(getImageType(parameters.sampler))) {
        case Dim::Dim1D:
        case Dim::Buffer:
            numComponents = 1;
            break;
        case Dim::Dim2D:
        case Dim::Cube:
        case Dim::Rect:
        case Dim::SubpassData:
            numComponents = 2;
            break;
        case Dim::Dim3D:
            numComponents = 3;
            break;
        default:
            assert(0);
            break;
        }
        // ...plus one extra component for the layer count of arrayed images.
        if (isArrayedImageType(getImageType(parameters.sampler)))
            ++numComponents;

        Id intType = isUnsignedResult ? makeUintType(32) : makeIntType(32);
        if (numComponents == 1)
            resultType = intType;
        else
            resultType = makeVectorType(intType, numComponents);

        break;
    }
    case Op::OpImageQueryLod:
        // returns a 2-vector of the coordinate's scalar type
        resultType = makeVectorType(getScalarTypeId(getTypeId(parameters.coords)), 2);
        break;
    case Op::OpImageQueryLevels:
    case Op::OpImageQuerySamples:
        resultType = isUnsignedResult ? makeUintType(32) : makeIntType(32);
        break;
    default:
        assert(0);
        break;
    }

    // Emit the query; coords/lod operands are only present when supplied.
    Instruction* query = new Instruction(getUniqueId(), resultType, opCode);
    query->addIdOperand(parameters.sampler);
    if (parameters.coords)
        query->addIdOperand(parameters.coords);
    if (parameters.lod)
        query->addIdOperand(parameters.lod);
    addInstruction(std::unique_ptr<Instruction>(query));
    // all image query opcodes require this capability
    addCapability(Capability::ImageQuery);

    return query->getResultId();
}
// External comments in header.
// Operates recursively to visit the composite's hierarchy.
Id Builder::createCompositeCompare(Decoration precision, Id value1, Id value2, bool equal)
{
    Id boolType = makeBoolType();
    Id valueType = getTypeId(value1);
    Id resultId = NoResult;
    int numConstituents = getNumTypeConstituents(valueType);

    // Scalars and Vectors
    if (isScalarType(valueType) || isVectorType(valueType)) {
        assert(valueType == getTypeId(value2));
        // These just need a single comparison, just have
        // to figure out what it is.
        Op op;
        switch (getMostBasicTypeClass(valueType)) {
        case Op::OpTypeFloat:
            // unordered not-equal, so comparisons involving NaN report "not equal"
            op = equal ? Op::OpFOrdEqual : Op::OpFUnordNotEqual;
            break;
        case Op::OpTypeInt:
        default:
            op = equal ? Op::OpIEqual : Op::OpINotEqual;
            break;
        case Op::OpTypeBool:
            op = equal ? Op::OpLogicalEqual : Op::OpLogicalNotEqual;
            // boolean results don't carry a precision decoration
            precision = NoPrecision;
            break;
        }

        if (isScalarType(valueType)) {
            // scalar
            resultId = createBinOp(op, boolType, value1, value2);
        } else {
            // vector
            resultId = createBinOp(op, makeVectorType(boolType, numConstituents), value1, value2);
            setPrecision(resultId, precision);
            // reduce vector compares... (All for ==, Any for !=)
            resultId = createUnaryOp(equal ? Op::OpAll : Op::OpAny, boolType, resultId);
        }

        return setPrecision(resultId, precision);
    }

    // Only structs, arrays, and matrices should be left.
    // They share in common the reduction operation across their constituents.
    assert(isAggregateType(valueType) || isMatrixType(valueType));

    // Compare each pair of constituents
    for (int constituent = 0; constituent < numConstituents; ++constituent) {
        std::vector<unsigned> indexes(1, constituent);
        Id constituentType1 = getContainedTypeId(getTypeId(value1), constituent);
        Id constituentType2 = getContainedTypeId(getTypeId(value2), constituent);
        Id constituent1 = createCompositeExtract(value1, constituentType1, indexes);
        Id constituent2 = createCompositeExtract(value2, constituentType2, indexes);

        // recurse into the constituent, then fold into the running result
        // with AND (for ==) or OR (for !=)
        Id subResultId = createCompositeCompare(precision, constituent1, constituent2, equal);

        if (constituent == 0)
            resultId = subResultId;
        else
            resultId = setPrecision(createBinOp(equal ? Op::OpLogicalAnd : Op::OpLogicalOr, boolType, resultId, subResultId),
                                    precision);
    }

    return resultId;
}
  3498. // OpCompositeConstruct
  3499. Id Builder::createCompositeConstruct(Id typeId, const std::vector<Id>& constituents)
  3500. {
  3501. assert(isAggregateType(typeId) || (getNumTypeConstituents(typeId) > 1 &&
  3502. getNumTypeConstituents(typeId) == constituents.size()) ||
  3503. ((isCooperativeVectorType(typeId) || isVectorType(typeId)) && constituents.size() == 1));
  3504. if (generatingOpCodeForSpecConst) {
  3505. // Sometime, even in spec-constant-op mode, the constant composite to be
  3506. // constructed may not be a specialization constant.
  3507. // e.g.:
  3508. // const mat2 m2 = mat2(a_spec_const, a_front_end_const, another_front_end_const, third_front_end_const);
  3509. // The first column vector should be a spec constant one, as a_spec_const is a spec constant.
  3510. // The second column vector should NOT be spec constant, as it does not contain any spec constants.
  3511. // To handle such cases, we check the constituents of the constant vector to determine whether this
  3512. // vector should be created as a spec constant.
  3513. return makeCompositeConstant(typeId, constituents,
  3514. std::any_of(constituents.begin(), constituents.end(),
  3515. [&](spv::Id id) { return isSpecConstant(id); }));
  3516. }
  3517. bool replicate = false;
  3518. size_t numConstituents = constituents.size();
  3519. if (useReplicatedComposites || isCooperativeVectorType(typeId)) {
  3520. replicate = numConstituents > 0 &&
  3521. std::equal(constituents.begin() + 1, constituents.end(), constituents.begin());
  3522. }
  3523. if (replicate) {
  3524. numConstituents = 1;
  3525. addCapability(spv::Capability::ReplicatedCompositesEXT);
  3526. addExtension(spv::E_SPV_EXT_replicated_composites);
  3527. }
  3528. Op opcode = replicate ? Op::OpCompositeConstructReplicateEXT : Op::OpCompositeConstruct;
  3529. Instruction* op = new Instruction(getUniqueId(), typeId, opcode);
  3530. op->reserveOperands(constituents.size());
  3531. for (size_t c = 0; c < numConstituents; ++c)
  3532. op->addIdOperand(constituents[c]);
  3533. addInstruction(std::unique_ptr<Instruction>(op));
  3534. return op->getResultId();
  3535. }
  3536. // coopmat conversion
  3537. Id Builder::createCooperativeMatrixConversion(Id typeId, Id source)
  3538. {
  3539. Instruction* op = new Instruction(getUniqueId(), typeId, Op::OpCooperativeMatrixConvertNV);
  3540. op->addIdOperand(source);
  3541. addInstruction(std::unique_ptr<Instruction>(op));
  3542. return op->getResultId();
  3543. }
  3544. // coopmat reduce
  3545. Id Builder::createCooperativeMatrixReduce(Op opcode, Id typeId, Id source, unsigned int mask, Id func)
  3546. {
  3547. Instruction* op = new Instruction(getUniqueId(), typeId, opcode);
  3548. op->addIdOperand(source);
  3549. op->addImmediateOperand(mask);
  3550. op->addIdOperand(func);
  3551. addInstruction(std::unique_ptr<Instruction>(op));
  3552. return op->getResultId();
  3553. }
  3554. // coopmat per-element operation
  3555. Id Builder::createCooperativeMatrixPerElementOp(Id typeId, const std::vector<Id>& operands)
  3556. {
  3557. Instruction* op = new Instruction(getUniqueId(), typeId, spv::Op::OpCooperativeMatrixPerElementOpNV);
  3558. // skip operand[0], which is where the result is stored
  3559. for (uint32_t i = 1; i < operands.size(); ++i) {
  3560. op->addIdOperand(operands[i]);
  3561. }
  3562. addInstruction(std::unique_ptr<Instruction>(op));
  3563. return op->getResultId();
  3564. }
// Vector or scalar constructor
Id Builder::createConstructor(Decoration precision, const std::vector<Id>& sources, Id resultTypeId)
{
    Id result = NoResult;
    unsigned int numTargetComponents = getNumTypeComponents(resultTypeId);
    unsigned int targetComponent = 0;

    // Special case: when calling a vector constructor with a single scalar
    // argument, smear the scalar
    if (sources.size() == 1 && isScalar(sources[0]) && (numTargetComponents > 1 || isCooperativeVectorType(resultTypeId)))
        return smearScalar(precision, sources[0], resultTypeId);

    // Special case: 2 vectors of equal size
    if (sources.size() == 1 &&
        (isVector(sources[0]) || isCooperativeVector(sources[0])) &&
        numTargetComponents == getNumComponents(sources[0])) {
        if (isCooperativeVector(sources[0]) != isCooperativeVectorType(resultTypeId)) {
            // regular vector <-> cooperative vector of the same width: a bitcast
            assert(isVector(sources[0]) != isVectorType(resultTypeId));
            return createUnaryOp(spv::Op::OpBitcast, resultTypeId, sources[0]);
        } else {
            // identical type: nothing to construct
            assert(resultTypeId == getTypeId(sources[0]));
            return sources[0];
        }
    }

    // accumulate the arguments for OpCompositeConstruct
    std::vector<Id> constituents;
    Id scalarTypeId = getScalarTypeId(resultTypeId);

    // lambda to store the result of visiting an argument component
    const auto latchResult = [&](Id comp) {
        if (numTargetComponents > 1 || isVectorType(resultTypeId))
            constituents.push_back(comp);
        else
            result = comp; // scalar target: the single component IS the result
        ++targetComponent;
    };

    // lambda to visit a vector argument's components
    const auto accumulateVectorConstituents = [&](Id sourceArg) {
        unsigned int sourceSize = getNumComponents(sourceArg);
        unsigned int sourcesToUse = sourceSize;
        // don't read more components than the target still has room for
        if (sourcesToUse + targetComponent > numTargetComponents)
            sourcesToUse = numTargetComponents - targetComponent;

        for (unsigned int s = 0; s < sourcesToUse; ++s) {
            std::vector<unsigned> swiz;
            swiz.push_back(s);
            latchResult(createRvalueSwizzle(precision, scalarTypeId, sourceArg, swiz));
        }
    };

    // lambda to visit a matrix argument's components
    const auto accumulateMatrixConstituents = [&](Id sourceArg) {
        unsigned int sourceSize = getNumColumns(sourceArg) * getNumRows(sourceArg);
        unsigned int sourcesToUse = sourceSize;
        if (sourcesToUse + targetComponent > numTargetComponents)
            sourcesToUse = numTargetComponents - targetComponent;

        // walk the matrix in column-major order, extracting scalars
        unsigned int col = 0;
        unsigned int row = 0;
        for (unsigned int s = 0; s < sourcesToUse; ++s) {
            if (row >= getNumRows(sourceArg)) {
                row = 0;
                col++;
            }
            std::vector<Id> indexes;
            indexes.push_back(col);
            indexes.push_back(row);
            latchResult(createCompositeExtract(sourceArg, scalarTypeId, indexes));
            row++;
        }
    };

    // Go through the source arguments, each one could have either
    // a single or multiple components to contribute.
    for (unsigned int i = 0; i < sources.size(); ++i) {
        if (isScalar(sources[i]) || isPointer(sources[i]))
            latchResult(sources[i]);
        else if (isVector(sources[i]) || isCooperativeVector(sources[i]))
            accumulateVectorConstituents(sources[i]);
        else if (isMatrix(sources[i]))
            accumulateMatrixConstituents(sources[i]);
        else
            assert(0);

        // stop once the target is fully populated; any further arguments are ignored
        if (targetComponent >= numTargetComponents)
            break;
    }

    // If the result is a vector, make it from the gathered constituents.
    if (constituents.size() > 0) {
        result = createCompositeConstruct(resultTypeId, constituents);
        return setPrecision(result, precision);
    } else {
        // Precision was set when generating this component.
        return result;
    }
}
// Comments in header
Id Builder::createMatrixConstructor(Decoration precision, const std::vector<Id>& sources, Id resultTypeId)
{
    Id componentTypeId = getScalarTypeId(resultTypeId);
    unsigned int numCols = getTypeNumColumns(resultTypeId);
    unsigned int numRows = getTypeNumRows(resultTypeId);

    // bit width of the component type, read from its type instruction's first literal
    Instruction* instr = module.getInstruction(componentTypeId);
    const unsigned bitCount = instr->getImmediateOperand(0);

    // Optimize matrix constructed from a bigger matrix
    if (isMatrix(sources[0]) && getNumColumns(sources[0]) >= numCols && getNumRows(sources[0]) >= numRows) {
        // To truncate the matrix to a smaller number of rows/columns, we need to:
        // 1. For each column, extract the column and truncate it to the required size using shuffle
        // 2. Assemble the resulting matrix from all columns
        Id matrix = sources[0];
        Id columnTypeId = getContainedTypeId(resultTypeId);
        Id sourceColumnTypeId = getContainedTypeId(getTypeId(matrix));

        // identity swizzle keeping only the first numRows components of each column
        std::vector<unsigned> channels;
        for (unsigned int row = 0; row < numRows; ++row)
            channels.push_back(row);

        std::vector<Id> matrixColumns;
        for (unsigned int col = 0; col < numCols; ++col) {
            std::vector<unsigned> indexes;
            indexes.push_back(col);
            Id colv = createCompositeExtract(matrix, sourceColumnTypeId, indexes);
            setPrecision(colv, precision);

            if (numRows != getNumRows(matrix)) {
                matrixColumns.push_back(createRvalueSwizzle(precision, columnTypeId, colv, channels));
            } else {
                // column already has the right length; use it as-is
                matrixColumns.push_back(colv);
            }
        }

        return setPrecision(createCompositeConstruct(resultTypeId, matrixColumns), precision);
    }

    // Detect a matrix being constructed from a repeated vector of the correct size.
    // Create the composite directly from it.
    if (sources.size() == numCols && isVector(sources[0]) && getNumComponents(sources[0]) == numRows &&
        std::equal(sources.begin() + 1, sources.end(), sources.begin())) {
        return setPrecision(createCompositeConstruct(resultTypeId, sources), precision);
    }

    // Otherwise, will use a two step process
    // 1. make a compile-time 2D array of values
    // 2. construct a matrix from that array

    // Step 1.

    // initialize the array to the identity matrix
    // NOTE(review): the loops below iterate a hard-coded 4, which presumably
    // equals maxMatrixSize (the declared dimension of 'ids') — confirm they
    // stay in sync if maxMatrixSize ever changes.
    Id ids[maxMatrixSize][maxMatrixSize];
    Id one = (bitCount == 64 ? makeDoubleConstant(1.0) : makeFloatConstant(1.0));
    Id zero = (bitCount == 64 ? makeDoubleConstant(0.0) : makeFloatConstant(0.0));
    for (int col = 0; col < 4; ++col) {
        for (int row = 0; row < 4; ++row) {
            if (col == row)
                ids[col][row] = one;
            else
                ids[col][row] = zero;
        }
    }

    // modify components as dictated by the arguments
    if (sources.size() == 1 && isScalar(sources[0])) {
        // a single scalar; resets the diagonals
        for (int col = 0; col < 4; ++col)
            ids[col][col] = sources[0];
    } else if (isMatrix(sources[0])) {
        // constructing from another matrix; copy over the parts that exist in both the argument and constructee
        Id matrix = sources[0];
        unsigned int minCols = std::min(numCols, getNumColumns(matrix));
        unsigned int minRows = std::min(numRows, getNumRows(matrix));
        for (unsigned int col = 0; col < minCols; ++col) {
            std::vector<unsigned> indexes;
            indexes.push_back(col);
            for (unsigned int row = 0; row < minRows; ++row) {
                indexes.push_back(row);
                ids[col][row] = createCompositeExtract(matrix, componentTypeId, indexes);
                indexes.pop_back();
                setPrecision(ids[col][row], precision);
            }
        }
    } else {
        // fill in the matrix in column-major order with whatever argument components are available
        unsigned int row = 0;
        unsigned int col = 0;

        for (unsigned int arg = 0; arg < sources.size() && col < numCols; ++arg) {
            Id argComp = sources[arg];
            for (unsigned int comp = 0; comp < getNumComponents(sources[arg]); ++comp) {
                // multi-component arguments get broken into scalars first
                if (getNumComponents(sources[arg]) > 1) {
                    argComp = createCompositeExtract(sources[arg], componentTypeId, comp);
                    setPrecision(argComp, precision);
                }
                ids[col][row++] = argComp;
                if (row == numRows) {
                    row = 0;
                    col++;
                }
                if (col == numCols) {
                    // If more components are provided than fit the matrix, discard the rest.
                    break;
                }
            }
        }
    }

    // Step 2: Construct a matrix from that array.
    // First make the column vectors, then make the matrix.

    // make the column vectors
    Id columnTypeId = getContainedTypeId(resultTypeId);
    std::vector<Id> matrixColumns;
    for (unsigned int col = 0; col < numCols; ++col) {
        std::vector<Id> vectorComponents;
        for (unsigned int row = 0; row < numRows; ++row)
            vectorComponents.push_back(ids[col][row]);
        Id column = createCompositeConstruct(columnTypeId, vectorComponents);
        setPrecision(column, precision);
        matrixColumns.push_back(column);
    }

    // make the matrix
    return setPrecision(createCompositeConstruct(resultTypeId, matrixColumns), precision);
}
// Comments in header
Builder::If::If(Id cond, SelectionControlMask ctrl, Builder& gb) :
    builder(gb),
    condition(cond),
    control(ctrl),
    elseBlock(nullptr)
{
    function = &builder.getBuildPoint()->getParent();

    // make the blocks, but only put the then-block into the function,
    // the else-block and merge-block will be added later, in order, after
    // earlier code is emitted
    thenBlock = new Block(builder.getUniqueId(), *function);
    mergeBlock = new Block(builder.getUniqueId(), *function);

    // Save the current block, so that we can add in the flow control split when
    // makeEndIf is called.
    headerBlock = builder.getBuildPoint();
    // The OpSelectionMerge goes into the header block now; the conditional
    // branch that completes the split is added there later by makeEndIf().
    builder.createSelectionMerge(mergeBlock, control);

    function->addBlock(thenBlock);
    builder.setBuildPoint(thenBlock);
}
// Comments in header
void Builder::If::makeBeginElse()
{
    // Close out the "then" by having it jump to the mergeBlock
    builder.createBranch(true, mergeBlock);

    // Make the first else block and add it to the function
    // (elseBlock being non-null is what tells makeEndIf an else exists)
    elseBlock = new Block(builder.getUniqueId(), *function);
    function->addBlock(elseBlock);

    // Start building the else block
    builder.setBuildPoint(elseBlock);
}
  3798. // Comments in header
  3799. void Builder::If::makeEndIf()
  3800. {
  3801. // jump to the merge block
  3802. builder.createBranch(true, mergeBlock);
  3803. // Go back to the headerBlock and make the flow control split
  3804. builder.setBuildPoint(headerBlock);
  3805. if (elseBlock)
  3806. builder.createConditionalBranch(condition, thenBlock, elseBlock);
  3807. else
  3808. builder.createConditionalBranch(condition, thenBlock, mergeBlock);
  3809. // add the merge block to the function
  3810. function->addBlock(mergeBlock);
  3811. builder.setBuildPoint(mergeBlock);
  3812. }
// Comments in header
void Builder::makeSwitch(Id selector, SelectionControlMask control, int numSegments, const std::vector<int>& caseValues,
                         const std::vector<int>& valueIndexToSegment, int defaultSegment,
                         std::vector<Block*>& segmentBlocks)
{
    Function& function = buildPoint->getParent();

    // make all the blocks
    for (int s = 0; s < numSegments; ++s)
        segmentBlocks.push_back(new Block(getUniqueId(), function));

    Block* mergeBlock = new Block(getUniqueId(), function);

    // make and insert the switch's selection-merge instruction
    createSelectionMerge(mergeBlock, control);

    // make the switch instruction
    Instruction* switchInst = new Instruction(NoResult, NoType, Op::OpSwitch);
    // selector + default target, then one (literal, target) pair per case
    switchInst->reserveOperands((caseValues.size() * 2) + 2);
    switchInst->addIdOperand(selector);
    // a negative defaultSegment means no user-written default: route it to the merge block
    auto defaultOrMerge = (defaultSegment >= 0) ? segmentBlocks[defaultSegment] : mergeBlock;
    switchInst->addIdOperand(defaultOrMerge->getId());
    defaultOrMerge->addPredecessor(buildPoint);
    for (int i = 0; i < (int)caseValues.size(); ++i) {
        switchInst->addImmediateOperand(caseValues[i]);
        switchInst->addIdOperand(segmentBlocks[valueIndexToSegment[i]]->getId());
        // record the CFG edge from the switch's block to each case target
        segmentBlocks[valueIndexToSegment[i]]->addPredecessor(buildPoint);
    }
    addInstruction(std::unique_ptr<Instruction>(switchInst));

    // push the merge block (popped again by endSwitch)
    switchMerges.push(mergeBlock);
}
  3841. // Comments in header
  3842. void Builder::addSwitchBreak(bool implicit)
  3843. {
  3844. // branch to the top of the merge block stack
  3845. createBranch(implicit, switchMerges.top());
  3846. createAndSetNoPredecessorBlock("post-switch-break");
  3847. }
  3848. // Comments in header
  3849. void Builder::nextSwitchSegment(std::vector<Block*>& segmentBlock, int nextSegment)
  3850. {
  3851. int lastSegment = nextSegment - 1;
  3852. if (lastSegment >= 0) {
  3853. // Close out previous segment by jumping, if necessary, to next segment
  3854. if (! buildPoint->isTerminated())
  3855. createBranch(true, segmentBlock[nextSegment]);
  3856. }
  3857. Block* block = segmentBlock[nextSegment];
  3858. block->getParent().addBlock(block);
  3859. setBuildPoint(block);
  3860. }
  3861. // Comments in header
  3862. void Builder::endSwitch(std::vector<Block*>& /*segmentBlock*/)
  3863. {
  3864. // Close out previous segment by jumping, if necessary, to next segment
  3865. if (! buildPoint->isTerminated())
  3866. addSwitchBreak(true);
  3867. switchMerges.top()->getParent().addBlock(switchMerges.top());
  3868. setBuildPoint(switchMerges.top());
  3869. switchMerges.pop();
  3870. }
  3871. Block& Builder::makeNewBlock()
  3872. {
  3873. Function& function = buildPoint->getParent();
  3874. auto block = new Block(getUniqueId(), function);
  3875. function.addBlock(block);
  3876. return *block;
  3877. }
// Start a new loop: allocate the four standard loop blocks (header, body,
// merge, continue target) and push them on the loop stack; returns the
// pushed LoopBlocks record.
Builder::LoopBlocks& Builder::makeNewLoop()
{
    // This verbosity is needed to simultaneously get the same behavior
    // everywhere (id's in the same order), have a syntax that works
    // across lots of versions of C++, have no warnings from pedantic
    // compilation modes, and leave the rest of the code alone.
    Block& head = makeNewBlock();
    Block& body = makeNewBlock();
    Block& merge = makeNewBlock();
    Block& continue_target = makeNewBlock();
    LoopBlocks blocks(head, body, merge, continue_target);
    loops.push(blocks);
    return loops.top();
}
  3892. void Builder::createLoopContinue()
  3893. {
  3894. createBranch(false, &loops.top().continue_target);
  3895. // Set up a block for dead code.
  3896. createAndSetNoPredecessorBlock("post-loop-continue");
  3897. }
  3898. void Builder::createLoopExit()
  3899. {
  3900. createBranch(false, &loops.top().merge);
  3901. // Set up a block for dead code.
  3902. createAndSetNoPredecessorBlock("post-loop-break");
  3903. }
  3904. void Builder::closeLoop()
  3905. {
  3906. loops.pop();
  3907. }
// Reset the access chain to its empty/default state so a new chain can be
// built up from scratch.
void Builder::clearAccessChain()
{
    accessChain.base = NoResult;
    accessChain.indexChain.clear();
    accessChain.instr = NoResult;
    accessChain.swizzle.clear();
    accessChain.component = NoResult;
    accessChain.preSwizzleBaseType = NoType;
    accessChain.isRValue = false;
    accessChain.coherentFlags.clear();
    accessChain.alignment = 0;
    // descriptor-heap bookkeeping (consumed by createDescHeapAccessChain())
    accessChain.descHeapInfo.descHeapBaseTy = NoResult;
    accessChain.descHeapInfo.descHeapStorageClass = StorageClass::Max;
    accessChain.descHeapInfo.descHeapInstId.clear();
    accessChain.descHeapInfo.descHeapBaseArrayStride = NoResult;
    accessChain.descHeapInfo.structRemappedBase = NoResult;
    accessChain.descHeapInfo.structRsrcTyOffsetCount = 0;
    accessChain.descHeapInfo.structRsrcTyFirstArrIndex = 0;
}
  3927. // Comments in header
  3928. void Builder::accessChainPushSwizzle(std::vector<unsigned>& swizzle, Id preSwizzleBaseType,
  3929. AccessChain::CoherentFlags coherentFlags, unsigned int alignment)
  3930. {
  3931. accessChain.coherentFlags |= coherentFlags;
  3932. accessChain.alignment |= alignment;
  3933. // swizzles can be stacked in GLSL, but simplified to a single
  3934. // one here; the base type doesn't change
  3935. if (accessChain.preSwizzleBaseType == NoType)
  3936. accessChain.preSwizzleBaseType = preSwizzleBaseType;
  3937. // if needed, propagate the swizzle for the current access chain
  3938. if (accessChain.swizzle.size() > 0) {
  3939. std::vector<unsigned> oldSwizzle = accessChain.swizzle;
  3940. accessChain.swizzle.resize(0);
  3941. for (unsigned int i = 0; i < swizzle.size(); ++i) {
  3942. assert(swizzle[i] < oldSwizzle.size());
  3943. accessChain.swizzle.push_back(oldSwizzle[swizzle[i]]);
  3944. }
  3945. } else
  3946. accessChain.swizzle = swizzle;
  3947. // determine if we need to track this swizzle anymore
  3948. simplifyAccessChainSwizzle();
  3949. }
// Comments in header
//
// Store 'rvalue' through the current access chain. A pending swizzle that
// is partial (write-mask) and static is decomposed into one store per
// component — also forced for mesh-shader outputs, which cannot be loaded
// back; otherwise a single store is emitted, with a load/modify/store
// sequence when a swizzle is still pending.
void Builder::accessChainStore(Id rvalue, Decoration nonUniform, spv::MemoryAccessMask memoryAccess, spv::Scope scope, unsigned int alignment)
{
    assert(accessChain.isRValue == false);

    transferAccessChainSwizzle(true);

    // MeshShadingEXT outputs don't support loads, so split swizzled stores
    bool isMeshOutput = getStorageClass(accessChain.base) == StorageClass::Output &&
        capabilities.find(spv::Capability::MeshShadingEXT) != capabilities.end();

    // If a swizzle exists and is not full and is not dynamic, then the swizzle will be broken into individual stores.
    if (accessChain.swizzle.size() > 0 &&
        ((getNumTypeComponents(getResultingAccessChainType()) != accessChain.swizzle.size() && accessChain.component == NoResult) || isMeshOutput)) {
        for (unsigned int i = 0; i < accessChain.swizzle.size(); ++i) {
            // temporarily extend the chain with this component's index and
            // collapse it to a pointer; reset 'instr' so the cached chain
            // is not reused
            accessChain.indexChain.push_back(makeUintConstant(accessChain.swizzle[i]));
            accessChain.instr = NoResult;

            Id base = collapseAccessChain();
            addDecoration(base, nonUniform);

            // restore the chain for the next component
            accessChain.indexChain.pop_back();
            accessChain.instr = NoResult;

            // dynamic component should be gone
            assert(accessChain.component == NoResult);

            // pull the matching component out of the source value
            Id source = createCompositeExtract(rvalue, getContainedTypeId(getTypeId(rvalue)), i);

            // take LSB of alignment
            alignment = alignment & ~(alignment & (alignment-1));
            if (getStorageClass(base) == StorageClass::PhysicalStorageBufferEXT) {
                // physical-storage-buffer accesses must carry an Aligned operand
                memoryAccess = (spv::MemoryAccessMask)(memoryAccess | spv::MemoryAccessMask::Aligned);
            }

            createStore(source, base, memoryAccess, scope, alignment);
        }
    }
    else {
        Id base = collapseAccessChain();
        addDecoration(base, nonUniform);

        Id source = rvalue;

        // dynamic component should be gone
        assert(accessChain.component == NoResult);

        // If swizzle still exists, it may be out-of-order, we must load the target vector,
        // extract and insert elements to perform writeMask and/or swizzle.
        if (accessChain.swizzle.size() > 0) {
            Id tempBaseId = createLoad(base, spv::NoPrecision);
            source = createLvalueSwizzle(getTypeId(tempBaseId), tempBaseId, source, accessChain.swizzle);
        }

        // take LSB of alignment
        alignment = alignment & ~(alignment & (alignment-1));
        if (getStorageClass(base) == StorageClass::PhysicalStorageBufferEXT) {
            // physical-storage-buffer accesses must carry an Aligned operand
            memoryAccess = (spv::MemoryAccessMask)(memoryAccess | spv::MemoryAccessMask::Aligned);
        }

        createStore(source, base, memoryAccess, scope, alignment);
    }
}
// Comments in header
//
// Load a value through the current access chain, returning its id.
// For r-value chains the result is computed in registers when possible
// (OpCompositeExtract for all-constant indices, OpVectorExtractDynamic for
// a dynamic vector index), spilling the base to a function-scope variable
// only when dynamic indexing into a non-vector requires a real pointer.
// For l-value chains the chain is collapsed to a pointer and loaded. Any
// remaining swizzle and/or dynamic component is then applied to the result.
Id Builder::accessChainLoad(Decoration precision, Decoration l_nonUniform,
    Decoration r_nonUniform, Id resultType, spv::MemoryAccessMask memoryAccess,
    spv::Scope scope, unsigned int alignment)
{
    Id id;

    if (accessChain.isRValue) {
        // transfer access chain, but try to stay in registers
        transferAccessChainSwizzle(false);
        if (accessChain.indexChain.size() > 0) {
            // type of the value the chain selects, before any pending swizzle
            Id swizzleBase = accessChain.preSwizzleBaseType != NoType ? accessChain.preSwizzleBaseType : resultType;

            // if all the accesses are constants, we can use OpCompositeExtract
            std::vector<unsigned> indexes;
            bool constant = true;
            for (int i = 0; i < (int)accessChain.indexChain.size(); ++i) {
                if (isConstantScalar(accessChain.indexChain[i]))
                    indexes.push_back(getConstantScalar(accessChain.indexChain[i]));
                else {
                    constant = false;
                    break;
                }
            }

            if (constant) {
                id = createCompositeExtract(accessChain.base, swizzleBase, indexes);
                setPrecision(id, precision);
            } else if (isVector(accessChain.base) || isCooperativeVector(accessChain.base)) {
                // a single dynamic index into a vector stays in registers
                assert(accessChain.indexChain.size() == 1);
                id = createVectorExtractDynamic(accessChain.base, resultType, accessChain.indexChain[0]);
            } else {
                // dynamic indexing into a non-vector r-value: spill it so an
                // access chain can address it
                Id lValue = NoResult;
                if (spvVersion >= Spv_1_4 && isValidInitializer(accessChain.base)) {
                    // make a new function variable for this r-value, using an initializer,
                    // and mark it as NonWritable so that downstream it can be detected as a lookup
                    // table
                    lValue = createVariable(NoPrecision, StorageClass::Function, getTypeId(accessChain.base),
                        "indexable", accessChain.base);
                    addDecoration(lValue, Decoration::NonWritable);
                } else {
                    lValue = createVariable(NoPrecision, StorageClass::Function, getTypeId(accessChain.base),
                        "indexable");
                    // store into it
                    createStore(accessChain.base, lValue);
                }
                // move base to the new variable
                accessChain.base = lValue;
                accessChain.isRValue = false;

                // load through the access chain
                id = createLoad(collapseAccessChain(), precision);
            }
        } else
            id = accessChain.base; // no precision, it was set when this was defined
    } else {
        transferAccessChainSwizzle(true);

        // take LSB of alignment
        alignment = alignment & ~(alignment & (alignment-1));
        if (getStorageClass(accessChain.base) == StorageClass::PhysicalStorageBufferEXT) {
            // physical-storage-buffer accesses must carry an Aligned operand
            memoryAccess = (spv::MemoryAccessMask)(memoryAccess | spv::MemoryAccessMask::Aligned);
        }

        // load through the access chain
        id = collapseAccessChain();

        // Apply nonuniform both to the access chain and the loaded value.
        // Buffer accesses need the access chain decorated, and this is where
        // loaded image types get decorated. TODO: This should maybe move to
        // createImageTextureFunctionCall.
        addDecoration(id, l_nonUniform);
        id = createLoad(id, precision, memoryAccess, scope, alignment);
        addDecoration(id, r_nonUniform);
    }

    // Done, unless there are swizzles to do
    if (accessChain.swizzle.size() == 0 && accessChain.component == NoResult)
        return id;

    // Do remaining swizzling

    // Do the basic swizzle
    if (accessChain.swizzle.size() > 0) {
        Id swizzledType = getScalarTypeId(getTypeId(id));
        if (accessChain.swizzle.size() > 1)
            swizzledType = makeVectorType(swizzledType, (int)accessChain.swizzle.size());
        id = createRvalueSwizzle(precision, swizzledType, id, accessChain.swizzle);
    }

    // Do the dynamic component
    if (accessChain.component != NoResult)
        id = setPrecision(createVectorExtractDynamic(id, resultType, accessChain.component), precision);

    addDecoration(id, r_nonUniform);
    return id;
}
  4084. Id Builder::accessChainGetLValue()
  4085. {
  4086. assert(accessChain.isRValue == false);
  4087. transferAccessChainSwizzle(true);
  4088. Id lvalue = collapseAccessChain();
  4089. // If swizzle exists, it is out-of-order or not full, we must load the target vector,
  4090. // extract and insert elements to perform writeMask and/or swizzle. This does not
  4091. // go with getting a direct l-value pointer.
  4092. assert(accessChain.swizzle.size() == 0);
  4093. assert(accessChain.component == NoResult);
  4094. return lvalue;
  4095. }
  4096. // comment in header
  4097. Id Builder::accessChainGetInferredType()
  4098. {
  4099. // anything to operate on?
  4100. // for untyped pointer, it may be remapped to a descriptor heap.
  4101. // for descriptor heap, its base data type will be determined later,
  4102. // according to load/store results' types.
  4103. if (accessChain.base == NoResult || isUntypedPointer(accessChain.base) ||
  4104. isStructureHeapMember(getTypeId(accessChain.base), accessChain.indexChain, 0) != 0)
  4105. return NoType;
  4106. Id type = getTypeId(accessChain.base);
  4107. // do initial dereference
  4108. if (! accessChain.isRValue)
  4109. type = getContainedTypeId(type);
  4110. // dereference each index
  4111. for (auto it = accessChain.indexChain.cbegin(); it != accessChain.indexChain.cend(); ++it) {
  4112. if (isStructType(type))
  4113. type = getContainedTypeId(type, getConstantScalar(*it));
  4114. else
  4115. type = getContainedTypeId(type);
  4116. }
  4117. // dereference swizzle
  4118. if (accessChain.swizzle.size() == 1)
  4119. type = getContainedTypeId(type);
  4120. else if (accessChain.swizzle.size() > 1)
  4121. type = makeVectorType(getContainedTypeId(type), (int)accessChain.swizzle.size());
  4122. // dereference component selection
  4123. if (accessChain.component)
  4124. type = getContainedTypeId(type);
  4125. return type;
  4126. }
// Write the completed SPIR-V module into 'out', section by section:
// header words, capabilities, extensions, extended-instruction imports,
// memory model, entry points, execution modes, debug info, annotations,
// types/constants/globals, and finally the function bodies.
void Builder::dump(std::vector<unsigned int>& out) const
{
    // Header, before first instructions:
    out.push_back(MagicNumber);
    out.push_back(spvVersion);
    out.push_back(builderNumber);
    out.push_back(uniqueId + 1);   // id bound: one past the largest id handed out
    out.push_back(0);              // reserved schema word

    // Capabilities
    for (auto it = capabilities.cbegin(); it != capabilities.cend(); ++it) {
        Instruction capInst(0, 0, Op::OpCapability);
        capInst.addImmediateOperand(*it);
        capInst.dump(out);
    }

    // Extensions
    for (auto it = extensions.cbegin(); it != extensions.cend(); ++it) {
        Instruction extInst(0, 0, Op::OpExtension);
        extInst.addStringOperand(it->c_str());
        extInst.dump(out);
    }

    dumpInstructions(out, imports);
    Instruction memInst(0, 0, Op::OpMemoryModel);
    memInst.addImmediateOperand(addressModel);
    memInst.addImmediateOperand(memoryModel);
    memInst.dump(out);

    // Instructions saved up while building:
    dumpInstructions(out, entryPoints);
    dumpInstructions(out, executionModes);

    // Debug instructions
    dumpInstructions(out, strings);
    dumpSourceInstructions(out);
    for (int e = 0; e < (int)sourceExtensions.size(); ++e) {
        Instruction sourceExtInst(0, 0, Op::OpSourceExtension);
        sourceExtInst.addStringOperand(sourceExtensions[e]);
        sourceExtInst.dump(out);
    }
    dumpInstructions(out, names);
    dumpModuleProcesses(out);

    // Annotation instructions
    dumpInstructions(out, decorations);

    // Types, constants, and module-scope globals
    dumpInstructions(out, constantsTypesGlobals);
    dumpInstructions(out, externals);

    // The functions
    module.dump(out);
}
//
// Protected methods.
//

// Turn the described access chain in 'accessChain' into an instruction(s)
// computing its address. This *cannot* include complex swizzles, which must
// be handled after this is called.
//
// Can generate code.
Id Builder::collapseAccessChain()
{
    assert(accessChain.isRValue == false);

    // did we already emit an access chain for this?
    if (accessChain.instr != NoResult)
        return accessChain.instr;

    // If we have a dynamic component, we can still transfer
    // that into a final operand to the access chain. We need to remap the
    // dynamic component through the swizzle to get a new dynamic component to
    // update.
    //
    // This was not done in transferAccessChainSwizzle() because it might
    // generate code.
    remapDynamicSwizzle();
    if (accessChain.component != NoResult) {
        // transfer the dynamic component to the access chain
        accessChain.indexChain.push_back(accessChain.component);
        accessChain.component = NoResult;
    }

    // note that non-trivial swizzling is left pending

    // do we have an access chain?
    if (accessChain.indexChain.size() == 0)
        return accessChain.base;

    // emit the access chain
    StorageClass storageClass = (StorageClass)module.getStorageClass(getTypeId(accessChain.base));
    // when descHeap info is set, use another access chain process.
    if ((isUntypedPointer(accessChain.base) || accessChain.descHeapInfo.structRsrcTyOffsetCount!= 0) &&
        accessChain.descHeapInfo.descHeapStorageClass != StorageClass::Max) {
        accessChain.instr = createDescHeapAccessChain();
    } else {
        accessChain.instr = createAccessChain(storageClass, accessChain.base, accessChain.indexChain);
    }

    // cache the result so repeated collapses reuse the same instruction
    return accessChain.instr;
}
  4213. // For a dynamic component selection of a swizzle.
  4214. //
  4215. // Turn the swizzle and dynamic component into just a dynamic component.
  4216. //
  4217. // Generates code.
  4218. void Builder::remapDynamicSwizzle()
  4219. {
  4220. // do we have a swizzle to remap a dynamic component through?
  4221. if (accessChain.component != NoResult && accessChain.swizzle.size() > 1) {
  4222. // build a vector of the swizzle for the component to map into
  4223. std::vector<Id> components;
  4224. for (int c = 0; c < (int)accessChain.swizzle.size(); ++c)
  4225. components.push_back(makeUintConstant(accessChain.swizzle[c]));
  4226. Id mapType = makeVectorType(makeUintType(32), (int)accessChain.swizzle.size());
  4227. Id map = makeCompositeConstant(mapType, components);
  4228. // use it
  4229. accessChain.component = createVectorExtractDynamic(map, makeUintType(32), accessChain.component);
  4230. accessChain.swizzle.clear();
  4231. }
  4232. }
  4233. // clear out swizzle if it is redundant, that is reselecting the same components
  4234. // that would be present without the swizzle.
  4235. void Builder::simplifyAccessChainSwizzle()
  4236. {
  4237. // If the swizzle has fewer components than the vector, it is subsetting, and must stay
  4238. // to preserve that fact.
  4239. if (getNumTypeComponents(accessChain.preSwizzleBaseType) > accessChain.swizzle.size())
  4240. return;
  4241. // if components are out of order, it is a swizzle
  4242. for (unsigned int i = 0; i < accessChain.swizzle.size(); ++i) {
  4243. if (i != accessChain.swizzle[i])
  4244. return;
  4245. }
  4246. // otherwise, there is no need to track this swizzle
  4247. accessChain.swizzle.clear();
  4248. if (accessChain.component == NoResult)
  4249. accessChain.preSwizzleBaseType = NoType;
  4250. }
// To the extent any swizzling can become part of the chain
// of accesses instead of a post operation, make it so.
// If 'dynamic' is true, include transferring the dynamic component,
// otherwise, leave it pending.
//
// Does not generate code. just updates the access chain.
void Builder::transferAccessChainSwizzle(bool dynamic)
{
    // non existent?
    if (accessChain.swizzle.size() == 0 && accessChain.component == NoResult)
        return;

    // too complex?
    // (this requires either a swizzle, or generating code for a dynamic component)
    if (accessChain.swizzle.size() > 1)
        return;

    // single component, either in the swizzle and/or dynamic component
    if (accessChain.swizzle.size() == 1) {
        assert(accessChain.component == NoResult);
        // handle static component selection: fold it into the chain as a
        // constant index
        accessChain.indexChain.push_back(makeUintConstant(accessChain.swizzle.front()));
        accessChain.swizzle.clear();
        accessChain.preSwizzleBaseType = NoType;
    } else if (dynamic && accessChain.component != NoResult) {
        assert(accessChain.swizzle.size() == 0);
        // handle dynamic component: fold the selector id itself into the chain
        accessChain.indexChain.push_back(accessChain.component);
        accessChain.preSwizzleBaseType = NoType;
        accessChain.component = NoResult;
    }
}
  4281. // Utility method for creating a new block and setting the insert point to
  4282. // be in it. This is useful for flow-control operations that need a "dummy"
  4283. // block proceeding them (e.g. instructions after a discard, etc).
  4284. void Builder::createAndSetNoPredecessorBlock(const char* /*name*/)
  4285. {
  4286. Block* block = new Block(getUniqueId(), buildPoint->getParent());
  4287. block->setUnreachable();
  4288. buildPoint->getParent().addBlock(block);
  4289. setBuildPoint(block);
  4290. // if (name)
  4291. // addName(block->getId(), name);
  4292. }
  4293. // Comments in header
  4294. void Builder::createBranch(bool implicit, Block* block)
  4295. {
  4296. Instruction* branch = new Instruction(Op::OpBranch);
  4297. branch->addIdOperand(block->getId());
  4298. if (implicit) {
  4299. addInstructionNoDebugInfo(std::unique_ptr<Instruction>(branch));
  4300. }
  4301. else {
  4302. addInstruction(std::unique_ptr<Instruction>(branch));
  4303. }
  4304. block->addPredecessor(buildPoint);
  4305. }
  4306. // Create OpConstantSizeOfEXT
  4307. Id Builder::createConstantSizeOfEXT(Id typeId)
  4308. {
  4309. Instruction* inst = new Instruction(getUniqueId(), makeIntType(32), Op::OpConstantSizeOfEXT);
  4310. inst->addIdOperand(typeId);
  4311. constantsTypesGlobals.push_back(std::unique_ptr<Instruction>(inst));
  4312. module.mapInstruction(inst);
  4313. return inst->getResultId();
  4314. }
  4315. void Builder::createSelectionMerge(Block* mergeBlock, SelectionControlMask control)
  4316. {
  4317. Instruction* merge = new Instruction(Op::OpSelectionMerge);
  4318. merge->reserveOperands(2);
  4319. merge->addIdOperand(mergeBlock->getId());
  4320. merge->addImmediateOperand(control);
  4321. addInstruction(std::unique_ptr<Instruction>(merge));
  4322. }
  4323. void Builder::createLoopMerge(Block* mergeBlock, Block* continueBlock, LoopControlMask control,
  4324. const std::vector<unsigned int>& operands)
  4325. {
  4326. Instruction* merge = new Instruction(Op::OpLoopMerge);
  4327. merge->reserveOperands(operands.size() + 3);
  4328. merge->addIdOperand(mergeBlock->getId());
  4329. merge->addIdOperand(continueBlock->getId());
  4330. merge->addImmediateOperand(control);
  4331. for (int op = 0; op < (int)operands.size(); ++op)
  4332. merge->addImmediateOperand(operands[op]);
  4333. addInstruction(std::unique_ptr<Instruction>(merge));
  4334. }
  4335. void Builder::createConditionalBranch(Id condition, Block* thenBlock, Block* elseBlock)
  4336. {
  4337. Instruction* branch = new Instruction(Op::OpBranchConditional);
  4338. branch->reserveOperands(3);
  4339. branch->addIdOperand(condition);
  4340. branch->addIdOperand(thenBlock->getId());
  4341. branch->addIdOperand(elseBlock->getId());
  4342. // A conditional branch is always attached to a condition expression
  4343. addInstructionNoDebugInfo(std::unique_ptr<Instruction>(branch));
  4344. thenBlock->addPredecessor(buildPoint);
  4345. elseBlock->addPredecessor(buildPoint);
  4346. }
// OpSource
// [OpSourceContinued]
// ...
//
// Emit the OpSource instruction (language, version, optional file id and
// source text) for one file, splitting long source text across
// OpSourceContinued instructions so no single instruction exceeds the
// 16-bit word-count limit.
void Builder::dumpSourceInstructions(const spv::Id fileId, const std::string& text,
                                     std::vector<unsigned int>& out) const
{
    const int maxWordCount = 0xFFFF;   // instruction word count is a 16-bit field
    const int opSourceWordCount = 4;   // words OpSource uses before the string
    // -1 leaves room for the string's terminating null byte
    const int nonNullBytesPerInstruction = 4 * (maxWordCount - opSourceWordCount) - 1;

    if (sourceLang != SourceLanguage::Unknown) {
        // OpSource Language Version File Source
        Instruction sourceInst(NoResult, NoType, Op::OpSource);
        sourceInst.reserveOperands(3);
        sourceInst.addImmediateOperand(sourceLang);
        sourceInst.addImmediateOperand(sourceVersion);
        // File operand
        if (fileId != NoResult) {
            sourceInst.addIdOperand(fileId);
            // Source operand
            if (text.size() > 0) {
                int nextByte = 0;
                std::string subString;
                while ((int)text.size() - nextByte > 0) {
                    subString = text.substr(nextByte, nonNullBytesPerInstruction);
                    if (nextByte == 0) {
                        // first chunk rides on the OpSource itself
                        sourceInst.addStringOperand(subString.c_str());
                        sourceInst.dump(out);
                    } else {
                        // OpSourceContinued carries each subsequent chunk
                        Instruction sourceContinuedInst(Op::OpSourceContinued);
                        sourceContinuedInst.addStringOperand(subString.c_str());
                        sourceContinuedInst.dump(out);
                    }
                    nextByte += nonNullBytesPerInstruction;
                }
            } else
                sourceInst.dump(out);
        } else
            sourceInst.dump(out);
    }
}
  4389. // Dump an OpSource[Continued] sequence for the source and every include file
  4390. void Builder::dumpSourceInstructions(std::vector<unsigned int>& out) const
  4391. {
  4392. if (emitNonSemanticShaderDebugInfo) return;
  4393. dumpSourceInstructions(mainFileId, sourceText, out);
  4394. for (auto iItr = includeFiles.begin(); iItr != includeFiles.end(); ++iItr)
  4395. dumpSourceInstructions(iItr->first, *iItr->second, out);
  4396. }
  4397. template <class Range> void Builder::dumpInstructions(std::vector<unsigned int>& out, const Range& instructions) const
  4398. {
  4399. for (const auto& inst : instructions) {
  4400. inst->dump(out);
  4401. }
  4402. }
  4403. void Builder::dumpModuleProcesses(std::vector<unsigned int>& out) const
  4404. {
  4405. for (int i = 0; i < (int)moduleProcesses.size(); ++i) {
  4406. Instruction moduleProcessed(Op::OpModuleProcessed);
  4407. moduleProcessed.addStringOperand(moduleProcesses[i]);
  4408. moduleProcessed.dump(out);
  4409. }
  4410. }
// Strict weak ordering for decoration instructions, giving the decoration
// section a stable, deterministic order.
bool Builder::DecorationInstructionLessThan::operator()(const std::unique_ptr<Instruction>& lhs,
                                                        const std::unique_ptr<Instruction>& rhs) const
{
    // Order by the id to which the decoration applies first. This is more intuitive.
    assert(lhs->isIdOperand(0) && rhs->isIdOperand(0));
    if (lhs->getIdOperand(0) != rhs->getIdOperand(0)) {
        return lhs->getIdOperand(0) < rhs->getIdOperand(0);
    }

    // Then by opcode.
    if (lhs->getOpCode() != rhs->getOpCode())
        return lhs->getOpCode() < rhs->getOpCode();

    // Now compare the operands.
    int minSize = std::min(lhs->getNumOperands(), rhs->getNumOperands());
    for (int i = 1; i < minSize; ++i) {
        // when operand kinds differ, id operands order before immediates
        if (lhs->isIdOperand(i) != rhs->isIdOperand(i)) {
            return lhs->isIdOperand(i) < rhs->isIdOperand(i);
        }
        if (lhs->isIdOperand(i)) {
            if (lhs->getIdOperand(i) != rhs->getIdOperand(i)) {
                return lhs->getIdOperand(i) < rhs->getIdOperand(i);
            }
        } else {
            if (lhs->getImmediateOperand(i) != rhs->getImmediateOperand(i)) {
                return lhs->getImmediateOperand(i) < rhs->getImmediateOperand(i);
            }
        }
    }

    // Equal common prefix: the shorter operand list orders first.
    if (lhs->getNumOperands() != rhs->getNumOperands())
        return lhs->getNumOperands() < rhs->getNumOperands();

    // In this case they are equal.
    return false;
}
  4442. } // end spv namespace