CGHLSLMSFinishCodeGen.cpp 127 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
7327832793280328132823283328432853286328732883289329032913292329332943295329632973298329933003301330233033304330533063307330833093310331133123313331433153316331733183319332033213322332333243325332633273328332933303331333233333334333533363337333833393340334133423343334433453346334733483349335033513352335333543355335633573358335933603361336233633364336533663367336833693370337133723373337433753376337733783379338033813382338333843385338633873388338933903391339233933394339533963397339833993400340134023403340434053406340734083409341034113412341334143415341634173418341934203421342234233424342534263427342834293430343134323433343434353436343734383439344034413442344334443445344634473448344934503451345234533454345534563457345834593460346134623463346434653466346734683469347034713472347334743475347634773478347934803481348234833484348534863487348834893490349134923493349434953496349734983499350035013502350335043505350635073508350935103511351235133514351535163517
  1. ///////////////////////////////////////////////////////////////////////////////
  2. // //
  3. // CGHLSLMSFinishCodeGen.cpp //
  4. // Copyright (C) Microsoft Corporation. All rights reserved. //
  5. // This file is distributed under the University of Illinois Open Source //
  6. // License. See LICENSE.TXT for details. //
  7. // //
  8. // Implement FinishCodeGen. //
  9. // //
  10. ///////////////////////////////////////////////////////////////////////////////
  11. #include "llvm/ADT/SmallVector.h"
  12. #include "llvm/ADT/StringRef.h"
  13. #include "llvm/Analysis/DxilValueCache.h"
  14. #include "llvm/IR/CFG.h"
  15. #include "llvm/IR/Function.h"
  16. #include "llvm/IR/GetElementPtrTypeIterator.h"
  17. #include "llvm/IR/IRBuilder.h"
  18. #include "llvm/IR/InstIterator.h"
  19. #include "llvm/IR/Instructions.h"
  20. #include "llvm/IR/Module.h"
  21. #include "llvm/IR/Type.h"
  22. #include "llvm/IR/DerivedTypes.h"
  23. #include "llvm/Transforms/Utils/Cloning.h"
  24. #include "llvm/Transforms/Utils/ValueMapper.h"
  25. #include "CodeGenModule.h"
  26. #include "clang/Basic/LangOptions.h"
  27. #include "clang/Frontend/CodeGenOptions.h"
  28. #include "clang/Parse/ParseHLSL.h" // root sig would be in Parser if part of lang
  29. #include "dxc/DXIL/DxilConstants.h"
  30. #include "dxc/DXIL/DxilOperations.h"
  31. #include "dxc/DXIL/DxilResourceProperties.h"
  32. #include "dxc/DXIL/DxilTypeSystem.h"
  33. #include "dxc/DXIL/DxilUtil.h"
  34. #include "dxc/DxilRootSignature/DxilRootSignature.h"
  35. #include "dxc/HLSL/DxilExportMap.h"
  36. #include "dxc/HLSL/DxilGenerationPass.h"
  37. #include "dxc/HLSL/HLMatrixType.h"
  38. #include "dxc/HLSL/HLModule.h"
  39. #include "dxc/HLSL/HLSLExtensionsCodegenHelper.h"
  40. #include "dxc/HlslIntrinsicOp.h"
  41. #include <fenv.h>
  42. #include <memory>
  43. #include <vector>
  44. #include "CGHLSLMSHelper.h"
  45. using namespace llvm;
  46. using namespace hlsl;
  47. using namespace CGHLSLMSHelper;
  48. namespace {
  49. Value *CreateHandleFromResPtr(Value *ResPtr, HLModule &HLM,
  50. llvm::Type *HandleTy, IRBuilder<> &Builder) {
  51. Module &M = *HLM.GetModule();
  52. // Load to make sure resource only have Ld/St use so mem2reg could remove
  53. // temp resource.
  54. Value *ldObj = Builder.CreateLoad(ResPtr);
  55. Value *args[] = {ldObj};
  56. CallInst *Handle = HLM.EmitHLOperationCall(
  57. Builder, HLOpcodeGroup::HLCreateHandle, 0, HandleTy, args, M);
  58. return Handle;
  59. }
  60. CallInst *CreateAnnotateHandle(HLModule &HLM, Value *Handle,
  61. DxilResourceProperties &RP, llvm::Type *ResTy,
  62. IRBuilder<> &Builder) {
  63. Constant *RPConstant = resource_helper::getAsConstant(
  64. RP, HLM.GetOP()->GetResourcePropertiesType(), *HLM.GetShaderModel());
  65. return HLM.EmitHLOperationCall(
  66. Builder, HLOpcodeGroup::HLAnnotateHandle,
  67. (unsigned)HLOpcodeGroup::HLAnnotateHandle, Handle->getType(),
  68. {Handle, RPConstant, UndefValue::get(ResTy)}, *HLM.GetModule());
  69. }
// Lower CBV bitcast use to handle use.
// Leave the load/store.
//
// Each BitCastInst of an internal CBuffer/TBuffer object is replaced with the
// chain: load resource -> HLCreateHandle -> HLAnnotateHandle ->
// HLSubscript(CBufferSubscript, handle, 0), so downstream passes only see
// handle-based constant-buffer access.
void LowerDynamicCBVUseToHandle(
    HLModule &HLM,
    DxilObjectProperties &objectProperties) {
  Type *HandleTy = HLM.GetOP()->GetHandleType();
  Module &M = *HLM.GetModule();
  // Phase 1: collect BitCast uses of CBV/TBV objects.  Collecting first avoids
  // mutating the use lists while iterating over them.
  SmallVector<std::pair<BitCastInst *, DxilResourceProperties>, 4> BitCasts;
  for (auto it : objectProperties.resMap) {
    DxilResourceProperties RP = it.second;
    if (RP.getResourceKind() != DXIL::ResourceKind::CBuffer &&
        RP.getResourceKind() != DXIL::ResourceKind::TBuffer)
      continue;
    Value *V = it.first;
    // Skip external globals.
    if (GlobalVariable *GV = dyn_cast<GlobalVariable>(V)) {
      if (GV->getLinkage() != GlobalValue::LinkageTypes::InternalLinkage)
        continue;
    }
    for (auto UserIt = V->user_begin(); UserIt != V->user_end();) {
      User *U = *(UserIt++);
      // Dead users need no lowering.
      if (U->user_empty())
        continue;
      if (BitCastInst *BCI = dyn_cast<BitCastInst>(U)) {
        BitCasts.emplace_back(std::make_pair(BCI, RP));
        continue;
      }
      DXASSERT((!isa<BitCastOperator>(U) || U->user_empty()),
               "all BitCast should be BitCastInst");
    }
  }
  // Phase 2: rewrite each collected bitcast into a cbuffer subscript on an
  // annotated handle, then erase the original cast.
  for (auto it : BitCasts) {
    BitCastInst *BCI = it.first;
    DxilResourceProperties RP = it.second;
    IRBuilder<> B(BCI);
    // Keep the emitted calls in place: do not let the builder fold them.
    B.AllowFolding = false;
    Value *ObjV = BCI->getOperand(0);
    Value *Handle = CreateHandleFromResPtr(ObjV, HLM, HandleTy, B);
    Type *ResTy = ObjV->getType()->getPointerElementType();
    Handle = CreateAnnotateHandle(HLM, Handle, RP, ResTy, B);
    // Create cb subscript.  The subscript returns the type the original
    // bitcast produced, so BCI's users can be redirected verbatim.
    llvm::Type *opcodeTy = B.getInt32Ty();
    llvm::Type *idxTy = opcodeTy;
    Constant *zeroIdx = ConstantInt::get(opcodeTy, 0);
    Type *cbTy = BCI->getType();
    llvm::FunctionType *SubscriptFuncTy =
        llvm::FunctionType::get(cbTy, {opcodeTy, HandleTy, idxTy}, false);
    Function *subscriptFunc =
        GetOrCreateHLFunction(M, SubscriptFuncTy, HLOpcodeGroup::HLSubscript,
                              (unsigned)HLSubscriptOpcode::CBufferSubscript);
    Constant *opArg = ConstantInt::get(
        opcodeTy, (unsigned)HLSubscriptOpcode::CBufferSubscript);
    Value *args[] = {opArg, Handle, zeroIdx};
    Instruction *cbSubscript =
        cast<Instruction>(B.CreateCall(subscriptFunc, {args}));
    BCI->replaceAllUsesWith(cbSubscript);
    BCI->eraseFromParent();
  }
}
  130. bool IsHLSLSamplerDescType(llvm::Type *Ty) {
  131. if (llvm::StructType *ST = dyn_cast<llvm::StructType>(Ty)) {
  132. if (!ST->hasName())
  133. return false;
  134. StringRef name = ST->getName();
  135. if (name == "struct..Sampler")
  136. return true;
  137. }
  138. return false;
  139. }
  140. #ifndef NDEBUG
  141. static bool ConsumePrefix(StringRef &Str, StringRef Prefix) {
  142. if (!Str.startswith(Prefix)) return false;
  143. Str = Str.substr(Prefix.size());
  144. return true;
  145. }
  146. bool IsHLSLBufferViewType(llvm::Type *Ty) {
  147. if (llvm::StructType *ST = dyn_cast<llvm::StructType>(Ty)) {
  148. if (!ST->hasName())
  149. return false;
  150. StringRef name = ST->getName();
  151. if (!(ConsumePrefix(name, "class.") ||
  152. ConsumePrefix(name, "struct.")))
  153. return false;
  154. if (name.startswith("ConstantBuffer<") ||
  155. name.startswith("TextureBuffer<"))
  156. return true;
  157. }
  158. return false;
  159. }
  160. #endif
// Lowers IOP_CreateResourceFromHeap intrinsic calls into handle form.
// Each call "resPtr = CreateResourceFromHeap(index)" becomes:
//   handle = HLIntrinsic(GetResFromHeap, index, isSampler)
//   store handle, handlePtr            (one alloca per distinct resPtr)
// and every subsequent load through a bitcast of resPtr becomes a load of the
// handle plus an HLCast(HandleToResCast) back to the resource type.
void LowerGetResourceFromHeap(
    HLModule &HLM, std::vector<std::pair<Function *, unsigned>> &intrinsicMap) {
  llvm::Module &M = *HLM.GetModule();
  llvm::Type *HandleTy = HLM.GetOP()->GetHandleType();
  unsigned GetResFromHeapOp =
      static_cast<unsigned>(IntrinsicOp::IOP_CreateResourceFromHeap);
  // Maps each original resource pointer to the alloca holding its handle.
  DenseMap<Instruction *, Instruction *> ResourcePtrToHandlePtrMap;
  for (auto it : intrinsicMap) {
    unsigned opcode = it.second;
    if (opcode != GetResFromHeapOp)
      continue;
    Function *F = it.first;
    HLOpcodeGroup group = hlsl::GetHLOpcodeGroup(F);
    if (group != HLOpcodeGroup::HLIntrinsic)
      continue;
    // Iterate with a copy of the iterator since CI is erased below.
    for (auto uit = F->user_begin(); uit != F->user_end();) {
      CallInst *CI = cast<CallInst>(*(uit++));
      // Arg 0 is this pointer.
      unsigned ArgIdx = 1;
      Instruction *ResPtr = cast<Instruction>(CI->getArgOperand(ArgIdx));
      Value *Index = CI->getArgOperand(ArgIdx+1);
      IRBuilder<> Builder(CI);
      // Make a handle from GetResFromHeap.  The sampler flag is derived from
      // the pointee type of the destination resource pointer.
      Value *IsSampler = Builder.getInt1(
          IsHLSLSamplerDescType(ResPtr->getType()->getPointerElementType()));
      Value *Handle = HLM.EmitHLOperationCall(
          Builder, HLOpcodeGroup::HLIntrinsic, GetResFromHeapOp, HandleTy,
          {Index, IsSampler}, M);
      // Find the handle ptr for res ptr; create one alloca per resource
      // pointer, placed in the function's entry block.
      // NOTE(review): this inner `it` shadows the loop variable above.
      auto it = ResourcePtrToHandlePtrMap.find(ResPtr);
      Instruction *HandlePtr = nullptr;
      if (it != ResourcePtrToHandlePtrMap.end()) {
        HandlePtr = it->second;
      } else {
        IRBuilder<> AllocaBuilder(
            ResPtr->getParent()->getParent()->getEntryBlock().begin());
        HandlePtr = AllocaBuilder.CreateAlloca(HandleTy);
        ResourcePtrToHandlePtrMap[ResPtr] = HandlePtr;
      }
      // Store handle to handle ptr.
      Builder.CreateStore(Handle, HandlePtr);
      CI->eraseFromParent();
    }
  }
  // Replace load of resource ptr with load of handle ptr.  Every remaining
  // user of the resource pointer is expected to be a BitCastInst whose users
  // are loads; the DXASSERT below checks the cast target is a resource or
  // buffer-view type (debug builds only).
  for (auto it : ResourcePtrToHandlePtrMap) {
    Instruction *resPtr = it.first;
    Instruction *handlePtr = it.second;
    for (auto uit = resPtr->user_begin(); uit != resPtr->user_end();) {
      User *U = *(uit++);
      BitCastInst *BCI = cast<BitCastInst>(U);
      DXASSERT(
          dxilutil::IsHLSLResourceType(
              BCI->getType()->getPointerElementType()) ||
              IsHLSLBufferViewType(BCI->getType()->getPointerElementType()),
          "illegal cast of resource ptr");
      for (auto cuit = BCI->user_begin(); cuit != BCI->user_end();) {
        LoadInst *LI = cast<LoadInst>(*(cuit++));
        IRBuilder<> Builder(LI);
        // load handle, then cast it back to the resource type the load
        // produced.
        Value *Handle = Builder.CreateLoad(handlePtr);
        Value *Res =
            HLM.EmitHLOperationCall(Builder, HLOpcodeGroup::HLCast,
                                    (unsigned)HLCastOpcode::HandleToResCast,
                                    LI->getType(), {Handle}, M);
        LI->replaceAllUsesWith(Res);
        LI->eraseFromParent();
      }
      BCI->eraseFromParent();
    }
    resPtr->eraseFromParent();
  }
}
  233. void ReplaceBoolVectorSubscript(CallInst *CI) {
  234. Value *Ptr = CI->getArgOperand(0);
  235. Value *Idx = CI->getArgOperand(1);
  236. Value *IdxList[] = {ConstantInt::get(Idx->getType(), 0), Idx};
  237. for (auto It = CI->user_begin(), E = CI->user_end(); It != E;) {
  238. Instruction *user = cast<Instruction>(*(It++));
  239. IRBuilder<> Builder(user);
  240. Value *GEP = Builder.CreateInBoundsGEP(Ptr, IdxList);
  241. if (LoadInst *LI = dyn_cast<LoadInst>(user)) {
  242. Value *NewLd = Builder.CreateLoad(GEP);
  243. Value *cast = Builder.CreateZExt(NewLd, LI->getType());
  244. LI->replaceAllUsesWith(cast);
  245. LI->eraseFromParent();
  246. } else {
  247. // Must be a store inst here.
  248. StoreInst *SI = cast<StoreInst>(user);
  249. Value *V = SI->getValueOperand();
  250. Value *cast =
  251. Builder.CreateICmpNE(V, llvm::ConstantInt::get(V->getType(), 0));
  252. Builder.CreateStore(cast, GEP);
  253. SI->eraseFromParent();
  254. }
  255. }
  256. CI->eraseFromParent();
  257. }
  258. void ReplaceBoolVectorSubscript(Function *F) {
  259. for (auto It = F->user_begin(), E = F->user_end(); It != E;) {
  260. User *user = *(It++);
  261. CallInst *CI = cast<CallInst>(user);
  262. ReplaceBoolVectorSubscript(CI);
  263. }
  264. }
  265. // Returns a valid field annotation (if present) for the matrix type of templated
  266. // resource on matrix type.
  267. // Example:-
  268. // AppendStructuredBuffer<float4x4> abuf;
  269. // Return the field annotation of the matrix type in the above decl.
  270. static DxilFieldAnnotation* GetTemplatedResMatAnnotation(Function *F, unsigned argOpIdx,
  271. unsigned matAnnotationIdx) {
  272. for (User* U : F->users()) {
  273. if (CallInst* CI = dyn_cast<CallInst>(U)) {
  274. if (argOpIdx >= CI->getNumArgOperands())
  275. continue;
  276. Value *resArg = CI->getArgOperand(argOpIdx);
  277. Type* resArgTy = resArg->getType();
  278. if (resArgTy->isPointerTy())
  279. resArgTy = cast<PointerType>(resArgTy)->getPointerElementType();
  280. if (isa<StructType>(resArgTy)) {
  281. DxilTypeSystem& TS = F->getParent()->GetHLModule().GetTypeSystem();
  282. auto *SA = TS.GetStructAnnotation(cast<StructType>(resArgTy));
  283. auto *FA = &(SA->GetFieldAnnotation(matAnnotationIdx));
  284. if (FA && FA->HasMatrixAnnotation()) {
  285. return FA;
  286. }
  287. }
  288. }
  289. }
  290. return nullptr;
  291. }
  292. // Add function body for intrinsic if possible.
  293. Function *CreateOpFunction(llvm::Module &M, Function *F,
  294. llvm::FunctionType *funcTy, HLOpcodeGroup group,
  295. unsigned opcode) {
  296. Function *opFunc = nullptr;
  297. AttributeSet attribs = F->getAttributes().getFnAttributes();
  298. llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  299. if (group == HLOpcodeGroup::HLIntrinsic) {
  300. IntrinsicOp intriOp = static_cast<IntrinsicOp>(opcode);
  301. switch (intriOp) {
  302. case IntrinsicOp::MOP_Append:
  303. case IntrinsicOp::MOP_Consume: {
  304. bool bAppend = intriOp == IntrinsicOp::MOP_Append;
  305. llvm::Type *handleTy = funcTy->getParamType(HLOperandIndex::kHandleOpIdx);
  306. // Don't generate body for OutputStream::Append.
  307. if (bAppend && HLModule::IsStreamOutputPtrType(handleTy)) {
  308. opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode, attribs);
  309. break;
  310. }
  311. opFunc = GetOrCreateHLFunctionWithBody(M, funcTy, group, opcode,
  312. bAppend ? "append" : "consume");
  313. llvm::Type *counterTy = llvm::Type::getInt32Ty(M.getContext());
  314. llvm::FunctionType *IncCounterFuncTy =
  315. llvm::FunctionType::get(counterTy, {opcodeTy, handleTy}, false);
  316. unsigned counterOpcode =
  317. bAppend ? (unsigned)IntrinsicOp::MOP_IncrementCounter
  318. : (unsigned)IntrinsicOp::MOP_DecrementCounter;
  319. Function *incCounterFunc = GetOrCreateHLFunction(
  320. M, IncCounterFuncTy, group, counterOpcode, attribs);
  321. llvm::Type *idxTy = counterTy;
  322. llvm::Type *valTy =
  323. bAppend ? funcTy->getParamType(HLOperandIndex::kAppendValOpIndex)
  324. : funcTy->getReturnType();
  325. // Return type for subscript should be pointer type, hence in memory
  326. // representation
  327. llvm::Type *subscriptTy = valTy;
  328. bool isBoolScalarOrVector = false;
  329. if (!subscriptTy->isPointerTy()) {
  330. if (subscriptTy->getScalarType()->isIntegerTy(1)) {
  331. isBoolScalarOrVector = true;
  332. llvm::Type *memReprType =
  333. llvm::IntegerType::get(subscriptTy->getContext(), 32);
  334. subscriptTy =
  335. subscriptTy->isVectorTy()
  336. ? llvm::VectorType::get(memReprType,
  337. subscriptTy->getVectorNumElements())
  338. : memReprType;
  339. }
  340. subscriptTy = llvm::PointerType::get(subscriptTy, 0);
  341. }
  342. llvm::FunctionType *SubscriptFuncTy = llvm::FunctionType::get(
  343. subscriptTy, {opcodeTy, handleTy, idxTy}, false);
  344. Function *subscriptFunc = GetOrCreateHLFunction(
  345. M, SubscriptFuncTy, HLOpcodeGroup::HLSubscript,
  346. (unsigned)HLSubscriptOpcode::DefaultSubscript, attribs);
  347. BasicBlock *BB =
  348. BasicBlock::Create(opFunc->getContext(), "Entry", opFunc);
  349. IRBuilder<> Builder(BB);
  350. auto argIter = opFunc->args().begin();
  351. // Skip the opcode arg.
  352. argIter++;
  353. Argument *thisArg = argIter++;
  354. // int counter = IncrementCounter/DecrementCounter(Buf);
  355. Value *incCounterOpArg = ConstantInt::get(idxTy, counterOpcode);
  356. Value *counter =
  357. Builder.CreateCall(incCounterFunc, {incCounterOpArg, thisArg});
  358. // Buf[counter];
  359. Value *subscriptOpArg = ConstantInt::get(
  360. idxTy, (unsigned)HLSubscriptOpcode::DefaultSubscript);
  361. Value *subscript =
  362. Builder.CreateCall(subscriptFunc, {subscriptOpArg, thisArg, counter});
  363. constexpr unsigned kArgIdx = 0;
  364. constexpr unsigned kMatAnnotationIdx = 0;
  365. DxilFieldAnnotation* MatAnnotation = HLMatrixType::isa(valTy) ?
  366. GetTemplatedResMatAnnotation(F, kArgIdx, kMatAnnotationIdx) : nullptr;
  367. if (bAppend) {
  368. Argument *valArg = argIter;
  369. // Buf[counter] = val;
  370. if (valTy->isPointerTy()) {
  371. unsigned size = M.getDataLayout().getTypeAllocSize(
  372. subscript->getType()->getPointerElementType());
  373. Builder.CreateMemCpy(subscript, valArg, size, 1);
  374. } else if (MatAnnotation) {
  375. // If the to-be-stored value is a matrix then we need to generate
  376. // an HL matrix store which is then handled appropriately in HLMatrixLowerPass.
  377. bool isRowMajor = MatAnnotation->GetMatrixAnnotation().Orientation == MatrixOrientation::RowMajor;
  378. Value* matStoreVal = valArg;
  379. // The in-reg matrix orientation is always row-major.
  380. // If the in-memory matrix orientation is col-major, then we
  381. // need to change the orientation to col-major before storing
  382. // to memory
  383. if (!isRowMajor) {
  384. unsigned castOpCode = (unsigned)HLCastOpcode::RowMatrixToColMatrix;
  385. // Construct signature of the function that is used for converting
  386. // orientation of a matrix from row-major to col-major.
  387. FunctionType* MatCastFnType = FunctionType::get(
  388. matStoreVal->getType(), { Builder.getInt32Ty(), matStoreVal->getType() },
  389. /* isVarArg */ false);
  390. // Create the conversion function.
  391. Function* MatCastFn = GetOrCreateHLFunction(
  392. M, MatCastFnType, HLOpcodeGroup::HLCast, castOpCode);
  393. Value* MatCastOpCode = ConstantInt::get(Builder.getInt32Ty(), castOpCode);
  394. // Insert call to the conversion function.
  395. matStoreVal = Builder.CreateCall(MatCastFn, { MatCastOpCode, matStoreVal });
  396. }
  397. unsigned storeOpCode = isRowMajor ? (unsigned) HLMatLoadStoreOpcode::RowMatStore
  398. : (unsigned) HLMatLoadStoreOpcode::ColMatStore;
  399. // Construct signature of the function that is used for storing
  400. // the matrix value to the memory.
  401. FunctionType* MatStFnType = FunctionType::get(
  402. Builder.getVoidTy(), { Builder.getInt32Ty(), subscriptTy, matStoreVal->getType() },
  403. /* isVarArg */ false);
  404. // Create the matrix store function.
  405. Function* MatStFn = GetOrCreateHLFunction(
  406. M, MatStFnType, HLOpcodeGroup::HLMatLoadStore, storeOpCode);
  407. Value* MatStOpCode = ConstantInt::get(Builder.getInt32Ty(), storeOpCode);
  408. // Insert call to the matrix store function.
  409. Builder.CreateCall(MatStFn, { MatStOpCode, subscript, matStoreVal });
  410. }
  411. else {
  412. Value* storedVal = valArg;
  413. // Convert to memory representation
  414. if (isBoolScalarOrVector)
  415. storedVal = Builder.CreateZExt(
  416. storedVal, subscriptTy->getPointerElementType(), "frombool");
  417. Builder.CreateStore(storedVal, subscript);
  418. }
  419. Builder.CreateRetVoid();
  420. } else {
  421. // return Buf[counter];
  422. if (valTy->isPointerTy())
  423. Builder.CreateRet(subscript);
  424. else if (MatAnnotation) {
  425. // If the to-be-loaded value is a matrix then we need to generate
  426. // an HL matrix load which is then handled appropriately in HLMatrixLowerPass.
  427. bool isRowMajor = MatAnnotation->GetMatrixAnnotation().Orientation == MatrixOrientation::RowMajor;
  428. unsigned loadOpCode = isRowMajor ? (unsigned)HLMatLoadStoreOpcode::RowMatLoad
  429. : (unsigned)HLMatLoadStoreOpcode::ColMatLoad;
  430. // Construct signature of the function that is used for loading
  431. // the matrix value from the memory.
  432. FunctionType* MatLdFnType = FunctionType::get(valTy, { Builder.getInt32Ty(), subscriptTy },
  433. /* isVarArg */ false);
  434. // Create the matrix load function.
  435. Function* MatLdFn = GetOrCreateHLFunction(
  436. M, MatLdFnType, HLOpcodeGroup::HLMatLoadStore, loadOpCode);
  437. Value* MatStOpCode = ConstantInt::get(Builder.getInt32Ty(), loadOpCode);
  438. // Insert call to the matrix load function.
  439. Value *matLdVal = Builder.CreateCall(MatLdFn, { MatStOpCode, subscript });
  440. // The in-reg matrix orientation is always row-major.
  441. // If the in-memory matrix orientation is col-major, then we
  442. // need to change the orientation to row-major after loading
  443. // from memory.
  444. if (!isRowMajor) {
  445. unsigned castOpCode = (unsigned)HLCastOpcode::ColMatrixToRowMatrix;
  446. // Construct signature of the function that is used for converting
  447. // orientation of a matrix from col-major to row-major.
  448. FunctionType* MatCastFnType = FunctionType::get(
  449. matLdVal->getType(), { Builder.getInt32Ty(), matLdVal->getType() },
  450. /* isVarArg */ false);
  451. // Create the conversion function.
  452. Function* MatCastFn = GetOrCreateHLFunction(
  453. M, MatCastFnType, HLOpcodeGroup::HLCast, castOpCode);
  454. Value* MatCastOpCode = ConstantInt::get(Builder.getInt32Ty(), castOpCode);
  455. // Insert call to the conversion function.
  456. matLdVal = Builder.CreateCall(MatCastFn, { MatCastOpCode, matLdVal });
  457. }
  458. Builder.CreateRet(matLdVal);
  459. }
  460. else {
  461. Value *retVal = Builder.CreateLoad(subscript);
  462. // Convert to register representation
  463. if (isBoolScalarOrVector)
  464. retVal = Builder.CreateICmpNE(
  465. retVal, Constant::getNullValue(retVal->getType()), "tobool");
  466. Builder.CreateRet(retVal);
  467. }
  468. }
  469. } break;
  470. case IntrinsicOp::IOP_sincos: {
  471. opFunc =
  472. GetOrCreateHLFunctionWithBody(M, funcTy, group, opcode, "sincos");
  473. llvm::Type *valTy =
  474. funcTy->getParamType(HLOperandIndex::kTrinaryOpSrc0Idx);
  475. llvm::FunctionType *sinFuncTy =
  476. llvm::FunctionType::get(valTy, {opcodeTy, valTy}, false);
  477. unsigned sinOp = static_cast<unsigned>(IntrinsicOp::IOP_sin);
  478. unsigned cosOp = static_cast<unsigned>(IntrinsicOp::IOP_cos);
  479. Function *sinFunc =
  480. GetOrCreateHLFunction(M, sinFuncTy, group, sinOp, attribs);
  481. Function *cosFunc =
  482. GetOrCreateHLFunction(M, sinFuncTy, group, cosOp, attribs);
  483. BasicBlock *BB =
  484. BasicBlock::Create(opFunc->getContext(), "Entry", opFunc);
  485. IRBuilder<> Builder(BB);
  486. auto argIter = opFunc->args().begin();
  487. // Skip the opcode arg.
  488. argIter++;
  489. Argument *valArg = argIter++;
  490. Argument *sinPtrArg = argIter++;
  491. Argument *cosPtrArg = argIter++;
  492. Value *sinOpArg = ConstantInt::get(opcodeTy, sinOp);
  493. Value *sinVal = Builder.CreateCall(sinFunc, {sinOpArg, valArg});
  494. Builder.CreateStore(sinVal, sinPtrArg);
  495. Value *cosOpArg = ConstantInt::get(opcodeTy, cosOp);
  496. Value *cosVal = Builder.CreateCall(cosFunc, {cosOpArg, valArg});
  497. Builder.CreateStore(cosVal, cosPtrArg);
  498. // Ret.
  499. Builder.CreateRetVoid();
  500. } break;
  501. default:
  502. opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode, attribs);
  503. break;
  504. }
  505. } else if (group == HLOpcodeGroup::HLExtIntrinsic) {
  506. llvm::StringRef fnName = F->getName();
  507. llvm::StringRef groupName = GetHLOpcodeGroupNameByAttr(F);
  508. opFunc = GetOrCreateHLFunction(M, funcTy, group, &groupName, &fnName,
  509. opcode, attribs);
  510. } else {
  511. opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode, attribs);
  512. }
  513. return opFunc;
  514. }
  515. DxilResourceProperties GetResourcePropsFromIntrinsicObjectArg(
  516. Value *arg, HLModule &HLM, DxilTypeSystem &typeSys,
  517. DxilObjectProperties &objectProperties) {
  518. DxilResourceProperties RP = objectProperties.GetResource(arg);
  519. if (RP.isValid())
  520. return RP;
  521. // Must be GEP.
  522. GEPOperator *GEP = cast<GEPOperator>(arg);
  523. // Find RP from GEP.
  524. Value *Ptr = GEP->getPointerOperand();
  525. // When Ptr is array of resource, check if it is another GEP.
  526. while (
  527. dxilutil::IsHLSLResourceType(dxilutil::GetArrayEltTy(Ptr->getType()))) {
  528. if (GEPOperator *ParentGEP = dyn_cast<GEPOperator>(Ptr)) {
  529. GEP = ParentGEP;
  530. Ptr = GEP->getPointerOperand();
  531. } else {
  532. break;
  533. }
  534. }
  535. // When ptr is array of resource, ptr could be in
  536. // objectProperties.
  537. RP = objectProperties.GetResource(Ptr);
  538. if (RP.isValid())
  539. return RP;
  540. DxilStructAnnotation *Anno = nullptr;
  541. for (auto gepIt = gep_type_begin(GEP), E = gep_type_end(GEP); gepIt != E;
  542. ++gepIt) {
  543. if (StructType *ST = dyn_cast<StructType>(*gepIt)) {
  544. Anno = typeSys.GetStructAnnotation(ST);
  545. DXASSERT(Anno, "missing type annotation");
  546. unsigned Index =
  547. cast<ConstantInt>(gepIt.getOperand())->getLimitedValue();
  548. DxilFieldAnnotation &fieldAnno = Anno->GetFieldAnnotation(Index);
  549. if (fieldAnno.HasResourceAttribute()) {
  550. MDNode *resAttrib = fieldAnno.GetResourceAttribute();
  551. DxilResourceBase R(DXIL::ResourceClass::Invalid);
  552. HLM.LoadDxilResourceBaseFromMDNode(resAttrib, R);
  553. switch (R.GetClass()) {
  554. case DXIL::ResourceClass::SRV:
  555. case DXIL::ResourceClass::UAV: {
  556. DxilResource Res;
  557. HLM.LoadDxilResourceFromMDNode(resAttrib, Res);
  558. RP = resource_helper::loadPropsFromResourceBase(&Res);
  559. } break;
  560. case DXIL::ResourceClass::Sampler: {
  561. DxilSampler Sampler;
  562. HLM.LoadDxilSamplerFromMDNode(resAttrib, Sampler);
  563. RP = resource_helper::loadPropsFromResourceBase(&Sampler);
  564. } break;
  565. default:
  566. DXASSERT(0, "invalid resource attribute in filed annotation");
  567. break;
  568. }
  569. break;
  570. }
  571. }
  572. }
  573. DXASSERT(RP.isValid(), "invalid resource properties");
  574. return RP;
  575. }
// Rewrite one HL intrinsic function F so that every call carries an explicit
// i32 opcode as the first argument, and so that resource-typed pointer
// arguments are passed as annotated handles instead of raw resource
// pointers. A replacement function with the extended signature is created,
// every call site is rewritten to use it, and F is erased.
// Special cases:
//  - bool vector subscript is handled entirely by ReplaceBoolVectorSubscript;
//  - double subscript (e.g. Texture2DMS sample access) folds the second
//    subscript's index into a single call on the new function.
void AddOpcodeParamForIntrinsic(
    HLModule &HLM, Function *F, unsigned opcode, llvm::Type *HandleTy,
    DxilObjectProperties &objectProperties) {
  llvm::Module &M = *HLM.GetModule();
  llvm::FunctionType *oldFuncTy = F->getFunctionType();
  SmallVector<llvm::Type *, 4> paramTyList;
  // Add the opcode param
  llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  paramTyList.emplace_back(opcodeTy);
  paramTyList.append(oldFuncTy->param_begin(), oldFuncTy->param_end());
  // Swap resource pointer parameters for the opaque handle type.
  for (unsigned i = 1; i < paramTyList.size(); i++) {
    llvm::Type *Ty = paramTyList[i];
    if (Ty->isPointerTy()) {
      Ty = Ty->getPointerElementType();
      if (dxilutil::IsHLSLResourceType(Ty)) {
        // Use handle type for resource type.
        // This will make sure temp object variable only used by createHandle.
        paramTyList[i] = HandleTy;
      }
    }
  }
  HLOpcodeGroup group = hlsl::GetHLOpcodeGroup(F);
  // Bool vector subscript does not get an opcode parameter; it is lowered
  // by rewriting the i8* accesses to i1* directly.
  if (group == HLOpcodeGroup::HLSubscript &&
      opcode == static_cast<unsigned>(HLSubscriptOpcode::VectorSubscript)) {
    llvm::FunctionType *FT = F->getFunctionType();
    llvm::Type *VecArgTy = FT->getParamType(0);
    llvm::VectorType *VType =
        cast<llvm::VectorType>(VecArgTy->getPointerElementType());
    llvm::Type *Ty = VType->getElementType();
    DXASSERT(Ty->isIntegerTy(), "Only bool could use VectorSubscript");
    llvm::IntegerType *ITy = cast<IntegerType>(Ty);
    DXASSERT_LOCALVAR(ITy, ITy->getBitWidth() == 1,
                      "Only bool could use VectorSubscript");
    // The return type is i8*.
    // Replace all uses with i1*.
    ReplaceBoolVectorSubscript(F);
    return;
  }
  bool isDoubleSubscriptFunc =
      group == HLOpcodeGroup::HLSubscript &&
      opcode == static_cast<unsigned>(HLSubscriptOpcode::DoubleSubscript);
  llvm::Type *RetTy = oldFuncTy->getReturnType();
  if (isDoubleSubscriptFunc) {
    // Shape the new signature from the first call site: the outer subscript
    // takes the coord type of the second subscript, gains a trailing
    // sampleIdx/mipLevel param, and returns a pointer to the resource's
    // element type.
    CallInst *doubleSub = cast<CallInst>(*F->user_begin());
    // Change currentIdx type into coord type.
    auto U = doubleSub->user_begin();
    Value *user = *U;
    CallInst *secSub = cast<CallInst>(user);
    unsigned coordIdx = HLOperandIndex::kSubscriptIndexOpIdx;
    // opcode operand not add yet, so the index need -1.
    if (GetHLOpcodeGroupByName(secSub->getCalledFunction()) ==
        HLOpcodeGroup::NotHL)
      coordIdx -= 1;
    Value *coord = secSub->getArgOperand(coordIdx);
    llvm::Type *coordTy = coord->getType();
    paramTyList[HLOperandIndex::kSubscriptIndexOpIdx] = coordTy;
    // Add the sampleIdx or mipLevel parameter to the end.
    paramTyList.emplace_back(opcodeTy);
    // Change return type to be resource ret type.
    // opcode operand not add yet, so the index need -1.
    Value *objPtr =
        doubleSub->getArgOperand(HLOperandIndex::kSubscriptObjectOpIdx - 1);
    // Must be a GEP
    GEPOperator *objGEP = cast<GEPOperator>(objPtr);
    // Walk the GEP's indexed types to find the resource type being accessed.
    gep_type_iterator GEPIt = gep_type_begin(objGEP), E = gep_type_end(objGEP);
    llvm::Type *resTy = nullptr;
    while (GEPIt != E) {
      if (dxilutil::IsHLSLResourceType(*GEPIt)) {
        resTy = *GEPIt;
        break;
      }
      GEPIt++;
    }
    DXASSERT(resTy, "must find the resource type");
    // Change object type to handle type.
    paramTyList[HLOperandIndex::kSubscriptObjectOpIdx] = HandleTy;
    // Change RetTy into pointer of resource return type (element 0 of the
    // resource struct).
    RetTy = cast<StructType>(resTy)->getElementType(0)->getPointerTo();
  }
  llvm::FunctionType *funcTy =
      llvm::FunctionType::get(RetTy, paramTyList, oldFuncTy->isVarArg());
  Function *opFunc = CreateOpFunction(M, F, funcTy, group, opcode);
  // Preserve any lowering strategy recorded on the original function.
  StringRef lower = hlsl::GetHLLowerStrategy(F);
  if (!lower.empty())
    hlsl::SetHLLowerStrategy(opFunc, lower);
  DxilTypeSystem &typeSys = HLM.GetTypeSystem();
  // Rewrite every call site. The iterator is advanced before the call is
  // erased, so removal is safe.
  for (auto user = F->user_begin(); user != F->user_end();) {
    // User must be a call.
    CallInst *oldCI = cast<CallInst>(*(user++));
    SmallVector<Value *, 4> opcodeParamList;
    Value *opcodeConst = Constant::getIntegerValue(opcodeTy, APInt(32, opcode));
    opcodeParamList.emplace_back(opcodeConst);
    opcodeParamList.append(oldCI->arg_operands().begin(),
                           oldCI->arg_operands().end());
    IRBuilder<> Builder(oldCI);
    if (isDoubleSubscriptFunc) {
      // Change obj to the resource pointer.
      Value *objVal = opcodeParamList[HLOperandIndex::kSubscriptObjectOpIdx];
      GEPOperator *objGEP = cast<GEPOperator>(objVal);
      SmallVector<Value *, 8> IndexList;
      IndexList.append(objGEP->idx_begin(), objGEP->idx_end());
      Value *lastIndex = IndexList.back();
      ConstantInt *constIndex = cast<ConstantInt>(lastIndex);
      DXASSERT_LOCALVAR(constIndex, constIndex->getLimitedValue() == 1,
                        "last index must 1");
      // Remove the last index.
      IndexList.pop_back();
      objVal = objGEP->getPointerOperand();
      DxilResourceProperties RP = GetResourcePropsFromIntrinsicObjectArg(
          objVal, HLM, typeSys, objectProperties);
      if (IndexList.size() > 1)
        objVal = Builder.CreateInBoundsGEP(objVal, IndexList);
      // Replace the resource pointer with an annotated handle.
      Value *Handle = CreateHandleFromResPtr(objVal, HLM, HandleTy, Builder);
      Type *ResTy = objVal->getType()->getPointerElementType();
      Handle = CreateAnnotateHandle(HLM, Handle, RP, ResTy, Builder);
      // Change obj to the resource pointer.
      opcodeParamList[HLOperandIndex::kSubscriptObjectOpIdx] = Handle;
      // Set idx and mipIdx: the inner subscript's index becomes the coord,
      // the outer index moves to the new trailing operand.
      Value *mipIdx = opcodeParamList[HLOperandIndex::kSubscriptIndexOpIdx];
      auto U = oldCI->user_begin();
      Value *user = *U;
      CallInst *secSub = cast<CallInst>(user);
      unsigned idxOpIndex = HLOperandIndex::kSubscriptIndexOpIdx;
      // opcode operand not added yet on the second subscript, so adjust.
      if (GetHLOpcodeGroupByName(secSub->getCalledFunction()) ==
          HLOpcodeGroup::NotHL)
        idxOpIndex--;
      Value *idx = secSub->getArgOperand(idxOpIndex);
      DXASSERT(secSub->hasOneUse(), "subscript should only has one use");
      // Add the sampleIdx or mipLevel parameter to the end.
      opcodeParamList[HLOperandIndex::kSubscriptIndexOpIdx] = idx;
      opcodeParamList.emplace_back(mipIdx);
      // Insert new call before secSub to make sure idx is ready to use.
      Builder.SetInsertPoint(secSub);
    }
    // Convert any remaining resource-pointer arguments to annotated handles.
    for (unsigned i = 1; i < opcodeParamList.size(); i++) {
      Value *arg = opcodeParamList[i];
      llvm::Type *Ty = arg->getType();
      if (Ty->isPointerTy()) {
        Ty = Ty->getPointerElementType();
        if (dxilutil::IsHLSLResourceType(Ty)) {
          DxilResourceProperties RP = GetResourcePropsFromIntrinsicObjectArg(
              arg, HLM, typeSys, objectProperties);
          // Use object type directly, not by pointer.
          // This will make sure temp object variable only used by ld/st.
          if (GEPOperator *argGEP = dyn_cast<GEPOperator>(arg)) {
            std::vector<Value *> idxList(argGEP->idx_begin(),
                                         argGEP->idx_end());
            // Create instruction to avoid GEPOperator.
            GetElementPtrInst *GEP = GetElementPtrInst::CreateInBounds(
                argGEP->getPointerOperand(), idxList);
            Builder.Insert(GEP);
            arg = GEP;
          }
          llvm::Type *ResTy = arg->getType()->getPointerElementType();
          Value *Handle = CreateHandleFromResPtr(arg, HLM, HandleTy, Builder);
          Handle = CreateAnnotateHandle(HLM, Handle, RP, ResTy, Builder);
          opcodeParamList[i] = Handle;
        }
      }
    }
    Value *CI = Builder.CreateCall(opFunc, opcodeParamList);
    if (!isDoubleSubscriptFunc) {
      // replace new call and delete the old call
      oldCI->replaceAllUsesWith(CI);
      oldCI->eraseFromParent();
    } else {
      // For double subscript: the single merged call replaces the second
      // subscript; both old calls are erased.
      auto U = oldCI->user_begin();
      Value *user = *U;
      CallInst *secSub = cast<CallInst>(user);
      secSub->replaceAllUsesWith(CI);
      secSub->eraseFromParent();
      oldCI->eraseFromParent();
    }
  }
  // delete the function
  F->eraseFromParent();
}
  755. void AddOpcodeParamForIntrinsics(
  756. HLModule &HLM, std::vector<std::pair<Function *, unsigned>> &intrinsicMap,
  757. DxilObjectProperties &objectProperties) {
  758. llvm::Type *HandleTy = HLM.GetOP()->GetHandleType();
  759. for (auto mapIter : intrinsicMap) {
  760. Function *F = mapIter.first;
  761. if (F->user_empty()) {
  762. // delete the function
  763. F->eraseFromParent();
  764. continue;
  765. }
  766. unsigned opcode = mapIter.second;
  767. AddOpcodeParamForIntrinsic(HLM, F, opcode, HandleTy, objectProperties);
  768. }
  769. }
  770. } // namespace
  771. namespace {
  772. // Returns true a global value is being updated
  773. bool GlobalHasStoreUserRec(Value *V, std::set<Value *> &visited) {
  774. bool isWriteEnabled = false;
  775. if (V && visited.find(V) == visited.end()) {
  776. visited.insert(V);
  777. for (User *U : V->users()) {
  778. if (isa<StoreInst>(U)) {
  779. return true;
  780. } else if (CallInst *CI = dyn_cast<CallInst>(U)) {
  781. Function *F = CI->getCalledFunction();
  782. if (!F->isIntrinsic()) {
  783. HLOpcodeGroup hlGroup = GetHLOpcodeGroup(F);
  784. switch (hlGroup) {
  785. case HLOpcodeGroup::NotHL:
  786. return true;
  787. case HLOpcodeGroup::HLMatLoadStore: {
  788. HLMatLoadStoreOpcode opCode =
  789. static_cast<HLMatLoadStoreOpcode>(hlsl::GetHLOpcode(CI));
  790. if (opCode == HLMatLoadStoreOpcode::ColMatStore ||
  791. opCode == HLMatLoadStoreOpcode::RowMatStore)
  792. return true;
  793. break;
  794. }
  795. case HLOpcodeGroup::HLCast:
  796. case HLOpcodeGroup::HLSubscript:
  797. if (GlobalHasStoreUserRec(U, visited))
  798. return true;
  799. break;
  800. default:
  801. break;
  802. }
  803. }
  804. } else if (isa<GEPOperator>(U) || isa<PHINode>(U) || isa<SelectInst>(U)) {
  805. if (GlobalHasStoreUserRec(U, visited))
  806. return true;
  807. }
  808. }
  809. }
  810. return isWriteEnabled;
  811. }
  812. // Returns true if any of the direct user of a global is a store inst
  813. // otherwise recurse through the remaining users and check if any GEP
  814. // exists and which in turn has a store inst as user.
  815. bool GlobalHasStoreUser(GlobalVariable *GV) {
  816. std::set<Value *> visited;
  817. Value *V = cast<Value>(GV);
  818. return GlobalHasStoreUserRec(V, visited);
  819. }
  820. GlobalVariable *CreateStaticGlobal(llvm::Module *M, GlobalVariable *GV) {
  821. Constant *GC = M->getOrInsertGlobal(GV->getName().str() + ".static.copy",
  822. GV->getType()->getPointerElementType());
  823. GlobalVariable *NGV = cast<GlobalVariable>(GC);
  824. if (GV->hasInitializer()) {
  825. NGV->setInitializer(GV->getInitializer());
  826. } else {
  827. // The copy being static, it should be initialized per llvm rules
  828. NGV->setInitializer(
  829. Constant::getNullValue(GV->getType()->getPointerElementType()));
  830. }
  831. // static global should have internal linkage
  832. NGV->setLinkage(GlobalValue::InternalLinkage);
  833. return NGV;
  834. }
// For each writable non-internal global (excluding HLSL objects and
// groupshared memory), create an internal static copy that takes over all
// uses, mark the original constant, and initialize the copy from the
// original with a memcpy at the start of EF.
void CreateWriteEnabledStaticGlobals(llvm::Module *M, llvm::Function *EF) {
  std::vector<GlobalVariable *> worklist;
  for (GlobalVariable &GV : M->globals()) {
    if (!GV.isConstant() && GV.getLinkage() != GlobalValue::InternalLinkage &&
        // skip globals which are HLSL objects or group shared
        !dxilutil::IsHLSLObjectType(GV.getType()->getPointerElementType()) &&
        !dxilutil::IsSharedMemoryGlobal(&GV)) {
      if (GlobalHasStoreUser(&GV))
        worklist.emplace_back(&GV);
      // TODO: Ensure that constant globals aren't using initializer
      // Mark constant even for written globals: their writes are redirected
      // to the static copy created below.
      GV.setConstant(true);
    }
  }
  // All copies are initialized after the allocas at the entry of EF.
  // NOTE(review): despite the original "all entryblocks" comment, only EF's
  // entry block receives the memcpy -- presumably other entry points are
  // handled elsewhere or unsupported; confirm against callers.
  IRBuilder<> Builder(
      dxilutil::FirstNonAllocaInsertionPt(&EF->getEntryBlock()));
  for (GlobalVariable *GV : worklist) {
    GlobalVariable *NGV = CreateStaticGlobal(M, GV);
    // Redirect every use of the original to the writable copy.
    GV->replaceAllUsesWith(NGV);
    // Copy the original's initial value into the writable copy.
    uint64_t size = M->getDataLayout().getTypeAllocSize(
        GV->getType()->getPointerElementType());
    Builder.CreateMemCpy(NGV, GV, size, 1);
  }
}
  859. } // namespace
  860. namespace {
  861. void SetEntryFunction(HLModule &HLM, Function *Entry,
  862. clang::CodeGen::CodeGenModule &CGM) {
  863. if (Entry == nullptr) {
  864. clang::DiagnosticsEngine &Diags = CGM.getDiags();
  865. unsigned DiagID = Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
  866. "cannot find entry function %0");
  867. Diags.Report(DiagID) << CGM.getCodeGenOpts().HLSLEntryFunction;
  868. return;
  869. }
  870. HLM.SetEntryFunction(Entry);
  871. }
  872. Function *CloneFunction(Function *Orig, const llvm::Twine &Name,
  873. llvm::Module *llvmModule, hlsl::DxilTypeSystem &TypeSys,
  874. hlsl::DxilTypeSystem &SrcTypeSys) {
  875. Function *F = Function::Create(Orig->getFunctionType(),
  876. GlobalValue::LinkageTypes::ExternalLinkage,
  877. Name, llvmModule);
  878. SmallVector<ReturnInst *, 2> Returns;
  879. ValueToValueMapTy vmap;
  880. // Map params.
  881. auto entryParamIt = F->arg_begin();
  882. for (Argument &param : Orig->args()) {
  883. vmap[&param] = (entryParamIt++);
  884. }
  885. llvm::CloneFunctionInto(F, Orig, vmap, /*ModuleLevelChagnes*/ false, Returns);
  886. TypeSys.CopyFunctionAnnotation(F, Orig, SrcTypeSys);
  887. return F;
  888. }
  889. // Clone shader entry function to be called by other functions.
  890. // The original function will be used as shader entry.
  891. void CloneShaderEntry(Function *ShaderF, StringRef EntryName, HLModule &HLM) {
  892. Function *F = CloneFunction(ShaderF, "", HLM.GetModule(), HLM.GetTypeSystem(),
  893. HLM.GetTypeSystem());
  894. F->takeName(ShaderF);
  895. F->setLinkage(GlobalValue::LinkageTypes::InternalLinkage);
  896. // Set to name before mangled.
  897. ShaderF->setName(EntryName);
  898. DxilFunctionAnnotation *annot = HLM.GetFunctionAnnotation(F);
  899. DxilParameterAnnotation &cloneRetAnnot = annot->GetRetTypeAnnotation();
  900. // Clear semantic for cloned one.
  901. cloneRetAnnot.SetSemanticString("");
  902. cloneRetAnnot.SetSemanticIndexVec({});
  903. for (unsigned i = 0; i < annot->GetNumParameters(); i++) {
  904. DxilParameterAnnotation &cloneParamAnnot = annot->GetParameterAnnotation(i);
  905. // Clear semantic for cloned one.
  906. cloneParamAnnot.SetSemanticString("");
  907. cloneParamAnnot.SetSemanticIndexVec({});
  908. }
  909. }
  910. } // namespace
  911. namespace {
  912. bool IsPatchConstantFunction(
  913. const Function *F, StringMap<PatchConstantInfo> &patchConstantFunctionMap) {
  914. DXASSERT_NOMSG(F != nullptr);
  915. for (auto &&p : patchConstantFunctionMap) {
  916. if (p.second.Func == F)
  917. return true;
  918. }
  919. return false;
  920. }
// Resolve the function named by the [patchconstantfunc] attribute of a hull
// shader entry, attach it to the HS entry in the HL module, and validate it:
// diagnoses a missing function, multiple overloads (first match is used),
// inout parameters, and input/output control point count mismatches against
// the HS entry's properties.
void SetPatchConstantFunctionWithAttr(
    const EntryFunctionInfo &EntryFunc,
    const clang::HLSLPatchConstantFuncAttr *PatchConstantFuncAttr,
    StringMap<PatchConstantInfo> &patchConstantFunctionMap,
    std::unordered_map<Function *, std::unique_ptr<DxilFunctionProps>>
        &patchConstantFunctionPropsMap,
    HLModule &HLM, clang::CodeGen::CodeGenModule &CGM) {
  StringRef funcName = PatchConstantFuncAttr->getFunctionName();
  auto Entry = patchConstantFunctionMap.find(funcName);
  if (Entry == patchConstantFunctionMap.end()) {
    // No function with the attribute-specified name was collected.
    clang::DiagnosticsEngine &Diags = CGM.getDiags();
    unsigned DiagID = Diags.getCustomDiagID(
        clang::DiagnosticsEngine::Error, "Cannot find patchconstantfunc %0.");
    Diags.Report(PatchConstantFuncAttr->getLocation(), DiagID) << funcName;
    return;
  }
  if (Entry->second.NumOverloads != 1) {
    // Ambiguous name: warn, note which overload won, and continue with it.
    clang::DiagnosticsEngine &Diags = CGM.getDiags();
    unsigned DiagID =
        Diags.getCustomDiagID(clang::DiagnosticsEngine::Warning,
                              "Multiple overloads of patchconstantfunc %0.");
    unsigned NoteID = Diags.getCustomDiagID(clang::DiagnosticsEngine::Note,
                                            "This overload was selected.");
    Diags.Report(PatchConstantFuncAttr->getLocation(), DiagID) << funcName;
    Diags.Report(Entry->second.SL, NoteID);
  }
  Function *patchConstFunc = Entry->second.Func;
  DXASSERT(
      HLM.HasDxilFunctionProps(EntryFunc.Func),
      " else AddHLSLFunctionInfo did not save the dxil function props for the "
      "HS entry.");
  DxilFunctionProps *HSProps = &HLM.GetDxilFunctionProps(EntryFunc.Func);
  HLM.SetPatchConstantFunctionForHS(EntryFunc.Func, patchConstFunc);
  DXASSERT_NOMSG(patchConstantFunctionPropsMap.count(patchConstFunc));
  // Check no inout parameter for patch constant function.
  DxilFunctionAnnotation *patchConstFuncAnnotation =
      HLM.GetFunctionAnnotation(patchConstFunc);
  for (unsigned i = 0; i < patchConstFuncAnnotation->GetNumParameters(); i++) {
    if (patchConstFuncAnnotation->GetParameterAnnotation(i)
            .GetParamInputQual() == DxilParamInputQual::Inout) {
      clang::DiagnosticsEngine &Diags = CGM.getDiags();
      unsigned DiagID = Diags.getCustomDiagID(
          clang::DiagnosticsEngine::Error,
          "Patch Constant function %0 should not have inout param.");
      Diags.Report(Entry->second.SL, DiagID) << funcName;
    }
  }
  // Input/Output control point validation.
  if (patchConstantFunctionPropsMap.count(patchConstFunc)) {
    const DxilFunctionProps &patchProps =
        *patchConstantFunctionPropsMap[patchConstFunc];
    // A count of 0 means the patch-constant function did not declare the
    // corresponding patch parameter, so there is nothing to check.
    if (patchProps.ShaderProps.HS.inputControlPoints != 0 &&
        patchProps.ShaderProps.HS.inputControlPoints !=
            HSProps->ShaderProps.HS.inputControlPoints) {
      clang::DiagnosticsEngine &Diags = CGM.getDiags();
      unsigned DiagID =
          Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
                                "Patch constant function's input patch input "
                                "should have %0 elements, but has %1.");
      Diags.Report(Entry->second.SL, DiagID)
          << HSProps->ShaderProps.HS.inputControlPoints
          << patchProps.ShaderProps.HS.inputControlPoints;
    }
    if (patchProps.ShaderProps.HS.outputControlPoints != 0 &&
        patchProps.ShaderProps.HS.outputControlPoints !=
            HSProps->ShaderProps.HS.outputControlPoints) {
      clang::DiagnosticsEngine &Diags = CGM.getDiags();
      unsigned DiagID =
          Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
                                "Patch constant function's output patch input "
                                "should have %0 elements, but has %1.");
      Diags.Report(Entry->second.SL, DiagID)
          << HSProps->ShaderProps.HS.outputControlPoints
          << patchProps.ShaderProps.HS.outputControlPoints;
    }
  }
}
  998. void SetPatchConstantFunction(
  999. const EntryFunctionInfo &EntryFunc,
  1000. std::unordered_map<Function *, const clang::HLSLPatchConstantFuncAttr *>
  1001. &HSEntryPatchConstantFuncAttr,
  1002. StringMap<PatchConstantInfo> &patchConstantFunctionMap,
  1003. std::unordered_map<Function *, std::unique_ptr<DxilFunctionProps>>
  1004. &patchConstantFunctionPropsMap,
  1005. HLModule &HLM, clang::CodeGen::CodeGenModule &CGM) {
  1006. auto AttrsIter = HSEntryPatchConstantFuncAttr.find(EntryFunc.Func);
  1007. DXASSERT(AttrsIter != HSEntryPatchConstantFuncAttr.end(),
  1008. "we have checked this in AddHLSLFunctionInfo()");
  1009. SetPatchConstantFunctionWithAttr(EntryFunc, AttrsIter->second,
  1010. patchConstantFunctionMap,
  1011. patchConstantFunctionPropsMap, HLM, CGM);
  1012. }
  1013. } // namespace
  1014. namespace {
  1015. // For case like:
  1016. // cbuffer A {
  1017. // float a;
  1018. // int b;
  1019. //}
  1020. //
  1021. // const static struct {
  1022. // float a;
  1023. // int b;
  1024. //} ST = { a, b };
  1025. // Replace user of ST with a and b.
  1026. bool ReplaceConstStaticGlobalUser(GEPOperator *GEP,
  1027. std::vector<Constant *> &InitList,
  1028. IRBuilder<> &Builder) {
  1029. if (GEP->getNumIndices() < 2) {
  1030. // Don't use sub element.
  1031. return false;
  1032. }
  1033. SmallVector<Value *, 4> idxList;
  1034. auto iter = GEP->idx_begin();
  1035. idxList.emplace_back(*(iter++));
  1036. ConstantInt *subIdx = dyn_cast<ConstantInt>(*(iter++));
  1037. DXASSERT(subIdx, "else dynamic indexing on struct field");
  1038. unsigned subIdxImm = subIdx->getLimitedValue();
  1039. DXASSERT(subIdxImm < InitList.size(), "else struct index out of bound");
  1040. Constant *subPtr = InitList[subIdxImm];
  1041. // Move every idx to idxList except idx for InitList.
  1042. while (iter != GEP->idx_end()) {
  1043. idxList.emplace_back(*(iter++));
  1044. }
  1045. Value *NewGEP = Builder.CreateGEP(subPtr, idxList);
  1046. GEP->replaceAllUsesWith(NewGEP);
  1047. return true;
  1048. }
  1049. } // namespace
  1050. namespace CGHLSLMSHelper {
// For each const static global initialized from cbuffer fields, redirect
// its GEP users to the recorded field constants (see
// ReplaceConstStaticGlobalUser). When every user was successfully replaced,
// the global's initializer ctor is gutted to an empty function.
void ReplaceConstStaticGlobals(
    std::unordered_map<GlobalVariable *, std::vector<Constant *>>
        &staticConstGlobalInitListMap,
    std::unordered_map<GlobalVariable *, Function *>
        &staticConstGlobalCtorMap) {
  for (auto &iter : staticConstGlobalInitListMap) {
    GlobalVariable *GV = iter.first;
    std::vector<Constant *> &InitList = iter.second;
    LLVMContext &Ctx = GV->getContext();
    // Do the replace. Replacement rewrites uses of each GEP, not of GV
    // itself, so iterating GV->users() here is not invalidated.
    bool bPass = true;
    for (User *U : GV->users()) {
      IRBuilder<> Builder(Ctx);
      if (GetElementPtrInst *GEPInst = dyn_cast<GetElementPtrInst>(U)) {
        // Instruction GEP: insert the replacement GEP right before it.
        Builder.SetInsertPoint(GEPInst);
        bPass &= ReplaceConstStaticGlobalUser(cast<GEPOperator>(GEPInst),
                                              InitList, Builder);
      } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(U)) {
        // Constant-expression GEP: no insert point needed.
        bPass &= ReplaceConstStaticGlobalUser(GEP, InitList, Builder);
      } else {
        DXASSERT(false, "invalid user of const static global");
      }
    }
    // Clear the Ctor which is useless now: replace its body with a bare
    // ret void so the global initializer does nothing.
    if (bPass) {
      Function *Ctor = staticConstGlobalCtorMap[GV];
      Ctor->getBasicBlockList().clear();
      BasicBlock *Entry = BasicBlock::Create(Ctx, "", Ctor);
      IRBuilder<> Builder(Entry);
      Builder.CreateRetVoid();
    }
  }
}
  1084. } // namespace CGHLSLMSHelper
  1085. namespace {
// Emit a load of ToTy from Ptr, whose pointee type is FromTy, for the
// bitcast patterns this pass knows how to simplify:
//  - scalar      -> <1 x T>    (splat into a vec1)
//  - iN          -> <1 x i1>   (load + icmp ne 0)
//  - <N x T>     -> <1 x T>    (shufflevector truncation)
//  - [N x T]     -> <N x T>    (element-wise load + insert)
//  - i1          -> iN         (zext)
// Returns the loaded/converted value, or nullptr when the FromTy/ToTy pair
// is not one of the handled patterns.
Value *CastLdValue(Value *Ptr, llvm::Type *FromTy, llvm::Type *ToTy,
                   IRBuilder<> &Builder) {
  if (ToTy->isVectorTy()) {
    unsigned vecSize = ToTy->getVectorNumElements();
    if (vecSize == 1 && ToTy->getVectorElementType() == FromTy) {
      Value *V = Builder.CreateLoad(Ptr);
      // ScalarToVec1Splat
      // Change scalar into vec1.
      Value *Vec1 = UndefValue::get(ToTy);
      return Builder.CreateInsertElement(Vec1, V, (uint64_t)0);
    } else if (vecSize == 1 && FromTy->isIntegerTy() &&
               ToTy->getVectorElementType()->isIntegerTy(1)) {
      // load(bitcast i32* to <1 x i1>*)
      // Rewrite to
      // insertelement(icmp ne (load i32*), 0)
      Value *IntV = Builder.CreateLoad(Ptr);
      Value *BoolV = Builder.CreateICmpNE(
          IntV, ConstantInt::get(IntV->getType(), 0), "tobool");
      Value *Vec1 = UndefValue::get(ToTy);
      return Builder.CreateInsertElement(Vec1, BoolV, (uint64_t)0);
    } else if (FromTy->isVectorTy() && vecSize == 1) {
      Value *V = Builder.CreateLoad(Ptr);
      // VectorTrunc
      // Change vector into vec1 by shuffling out element 0.
      int mask[] = {0};
      return Builder.CreateShuffleVector(V, V, mask);
    } else if (FromTy->isArrayTy()) {
      llvm::Type *FromEltTy = FromTy->getArrayElementType();
      llvm::Type *ToEltTy = ToTy->getVectorElementType();
      if (FromTy->getArrayNumElements() == vecSize && FromEltTy == ToEltTy) {
        // ArrayToVector: load each array element and insert it into the
        // result vector.
        Value *NewLd = UndefValue::get(ToTy);
        Value *zeroIdx = Builder.getInt32(0);
        for (unsigned i = 0; i < vecSize; i++) {
          Value *GEP =
              Builder.CreateInBoundsGEP(Ptr, {zeroIdx, Builder.getInt32(i)});
          Value *Elt = Builder.CreateLoad(GEP);
          NewLd = Builder.CreateInsertElement(NewLd, Elt, i);
        }
        return NewLd;
      }
    }
  } else if (FromTy == Builder.getInt1Ty()) {
    Value *V = Builder.CreateLoad(Ptr);
    // BoolCast: widen the loaded i1 to the integer ToTy.
    DXASSERT_NOMSG(ToTy->isIntegerTy());
    return Builder.CreateZExt(V, ToTy);
  }
  // Unhandled type pair.
  return nullptr;
}
// Convert a value V of type ToTy into a value storable through Ptr (pointee
// type FromTy), for the same bitcast patterns as CastLdValue:
//  - <1 x T>  -> scalar T     (extract element 0)
//  - <N x T>  -> [N x T]      (element-wise extract + store; performed here)
//  - ToTy iN  -> i1           (icmp ne 0)
// Returns the converted value for the caller to store, or nullptr either
// when the stores were already emitted (array case) or when the type pair
// is not handled.
Value *CastStValue(Value *Ptr, Value *V, llvm::Type *FromTy, llvm::Type *ToTy,
                   IRBuilder<> &Builder) {
  if (ToTy->isVectorTy()) {
    unsigned vecSize = ToTy->getVectorNumElements();
    if (vecSize == 1 && ToTy->getVectorElementType() == FromTy) {
      // ScalarToVec1Splat
      // Change vec1 back to scalar.
      Value *Elt = Builder.CreateExtractElement(V, (uint64_t)0);
      return Elt;
    } else if (FromTy->isVectorTy() && vecSize == 1) {
      // VectorTrunc
      // Change vec1 into vector.
      // Should not happen.
      // Reported error at Sema::ImpCastExprToType.
      DXASSERT_NOMSG(0);
    } else if (FromTy->isArrayTy()) {
      llvm::Type *FromEltTy = FromTy->getArrayElementType();
      llvm::Type *ToEltTy = ToTy->getVectorElementType();
      if (FromTy->getArrayNumElements() == vecSize && FromEltTy == ToEltTy) {
        // ArrayToVector: scatter each vector element into the array slot.
        Value *zeroIdx = Builder.getInt32(0);
        for (unsigned i = 0; i < vecSize; i++) {
          Value *Elt = Builder.CreateExtractElement(V, i);
          Value *GEP =
              Builder.CreateInBoundsGEP(Ptr, {zeroIdx, Builder.getInt32(i)});
          Builder.CreateStore(Elt, GEP);
        }
        // The store already done.
        // Return null to ignore use of the return value.
        return nullptr;
      }
    }
  } else if (FromTy == Builder.getInt1Ty()) {
    // BoolCast
    // Change the integer V into an i1 via comparison with zero.
    DXASSERT_NOMSG(ToTy->isIntegerTy());
    Value *CastV = Builder.CreateICmpNE(V, ConstantInt::get(V->getType(), 0));
    return CastV;
  }
  // Unhandled type pair.
  return nullptr;
}
  1177. bool SimplifyBitCastLoad(LoadInst *LI, llvm::Type *FromTy, llvm::Type *ToTy,
  1178. Value *Ptr) {
  1179. IRBuilder<> Builder(LI);
  1180. // Cast FromLd to ToTy.
  1181. Value *CastV = CastLdValue(Ptr, FromTy, ToTy, Builder);
  1182. if (CastV) {
  1183. LI->replaceAllUsesWith(CastV);
  1184. return true;
  1185. } else {
  1186. return false;
  1187. }
  1188. }
  1189. bool SimplifyBitCastStore(StoreInst *SI, llvm::Type *FromTy, llvm::Type *ToTy,
  1190. Value *Ptr) {
  1191. IRBuilder<> Builder(SI);
  1192. Value *V = SI->getValueOperand();
  1193. // Cast Val to FromTy.
  1194. Value *CastV = CastStValue(Ptr, V, FromTy, ToTy, Builder);
  1195. if (CastV) {
  1196. Builder.CreateStore(CastV, Ptr);
  1197. return true;
  1198. } else {
  1199. return false;
  1200. }
  1201. }
// Try to rewrite a GEP whose base is the result of the bitcast so it
// indexes Ptr (pointee type FromTy) directly instead of the bitcast
// pointer (pointee type ToTy). Returns true when GEP's uses were replaced.
bool SimplifyBitCastGEP(GEPOperator *GEP, llvm::Type *FromTy, llvm::Type *ToTy,
                        Value *Ptr) {
  if (ToTy->isVectorTy()) {
    unsigned vecSize = ToTy->getVectorNumElements();
    if (vecSize == 1 && ToTy->getVectorElementType() == FromTy) {
      // ScalarToVec1Splat
      // Indexing into a <1 x T>* is equivalent to using the T* itself.
      GEP->replaceAllUsesWith(Ptr);
      return true;
    } else if (FromTy->isVectorTy() && vecSize == 1) {
      // VectorTrunc
      DXASSERT_NOMSG(
          !isa<llvm::VectorType>(GEP->getType()->getPointerElementType()));
      IRBuilder<> Builder(FromTy->getContext());
      // Set an insert point only when GEP is a real instruction; when it
      // is a constant expression the builder folds the new GEP instead.
      if (Instruction *I = dyn_cast<Instruction>(GEP))
        Builder.SetInsertPoint(I);
      // Reuse the original index list on the pre-bitcast pointer.
      std::vector<Value *> idxList(GEP->idx_begin(), GEP->idx_end());
      Value *NewGEP = Builder.CreateInBoundsGEP(Ptr, idxList);
      GEP->replaceAllUsesWith(NewGEP);
      return true;
    } else if (FromTy->isArrayTy()) {
      llvm::Type *FromEltTy = FromTy->getArrayElementType();
      llvm::Type *ToEltTy = ToTy->getVectorElementType();
      if (FromTy->getArrayNumElements() == vecSize && FromEltTy == ToEltTy) {
        // ArrayToVector.
        // Intentionally not simplified here; fall through to return false.
      }
    }
  } else if (FromTy == llvm::Type::getInt1Ty(FromTy->getContext())) {
    // BoolCast
    // Intentionally not simplified here; fall through to return false.
  }
  return false;
}
// Set of instructions queued for deletion after bitcast simplification.
typedef SmallPtrSet<Instruction *, 4> SmallInstSet;
// Rewrites users (loads, stores, GEPs) of a pointer-to-pointer bitcast so
// they access the original pointer directly. Simplified instruction users
// are added to deadInsts for the caller to erase later.
void SimplifyBitCast(BitCastOperator *BC, SmallInstSet &deadInsts) {
  Value *Ptr = BC->getOperand(0);
  llvm::Type *FromTy = Ptr->getType();
  llvm::Type *ToTy = BC->getType();
  // Only pointer-to-pointer bitcasts are handled here.
  if (!FromTy->isPointerTy() || !ToTy->isPointerTy())
    return;
  FromTy = FromTy->getPointerElementType();
  ToTy = ToTy->getPointerElementType();
  // Take care case like %2 = bitcast %struct.T* %1 to <1 x float>*.
  bool GEPCreated = false;
  if (FromTy->isStructTy()) {
    IRBuilder<> Builder(FromTy->getContext());
    // For an instruction bitcast insert before it; for a constant
    // expression the builder has no insert point and folds the GEP.
    if (Instruction *I = dyn_cast<Instruction>(BC))
      Builder.SetInsertPoint(I);
    Value *zeroIdx = Builder.getInt32(0);
    // Drill into the first element of (possibly nested) structs; each
    // level contributes one zero index to the GEP.
    unsigned nestLevel = 1;
    while (llvm::StructType *ST = dyn_cast<llvm::StructType>(FromTy)) {
      if (ST->getNumElements() == 0)
        break;
      FromTy = ST->getElementType(0);
      nestLevel++;
    }
    std::vector<Value *> idxList(nestLevel, zeroIdx);
    Ptr = Builder.CreateGEP(Ptr, idxList);
    GEPCreated = true;
  }
  // NOTE(review): the handlers below drop references of simplified users
  // while BC->users() is being iterated — confirm this cannot invalidate
  // the use-list iterator for the remaining users.
  for (User *U : BC->users()) {
    if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
      if (SimplifyBitCastLoad(LI, FromTy, ToTy, Ptr)) {
        LI->dropAllReferences();
        deadInsts.insert(LI);
      }
    } else if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
      if (SimplifyBitCastStore(SI, FromTy, ToTy, Ptr)) {
        SI->dropAllReferences();
        deadInsts.insert(SI);
      }
    } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(U)) {
      // Only instruction GEPs need deletion; constant GEPs have no parent.
      if (SimplifyBitCastGEP(GEP, FromTy, ToTy, Ptr))
        if (Instruction *I = dyn_cast<Instruction>(GEP)) {
          I->dropAllReferences();
          deadInsts.insert(I);
        }
    } else if (dyn_cast<CallInst>(U)) {
      // Skip function call.
    } else if (dyn_cast<BitCastInst>(U)) {
      // Skip bitcast.
    } else if (dyn_cast<AddrSpaceCastInst>(U)) {
      // Skip addrspacecast.
    } else {
      DXASSERT(0, "not support yet");
    }
  }
  // We created a GEP instruction but didn't end up consuming it, so delete it.
  if (GEPCreated && Ptr->use_empty()) {
    if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Ptr))
      GEP->eraseFromParent();
    else
      cast<Constant>(Ptr)->destroyConstant();
  }
}
// Signatures of the host-side callbacks used to constant-fold HLSL
// intrinsic calls at compile time, one per element type and arity.
typedef float(__cdecl *FloatUnaryEvalFuncType)(float);
typedef double(__cdecl *DoubleUnaryEvalFuncType)(double);
typedef APInt(__cdecl *IntBinaryEvalFuncType)(const APInt &, const APInt &);
typedef float(__cdecl *FloatBinaryEvalFuncType)(float, float);
typedef double(__cdecl *DoubleBinaryEvalFuncType)(double, double);
typedef APInt(__cdecl *IntTernaryEvalFuncType)(const APInt &, const APInt &,
                                               const APInt &);
typedef float(__cdecl *FloatTernaryEvalFuncType)(float, float, float);
typedef double(__cdecl *DoubleTernaryEvalFuncType)(double, double, double);
  1303. Value *EvalUnaryIntrinsic(ConstantFP *fpV, FloatUnaryEvalFuncType floatEvalFunc,
  1304. DoubleUnaryEvalFuncType doubleEvalFunc) {
  1305. llvm::Type *Ty = fpV->getType();
  1306. Value *Result = nullptr;
  1307. if (Ty->isDoubleTy()) {
  1308. double dV = fpV->getValueAPF().convertToDouble();
  1309. Value *dResult = ConstantFP::get(Ty, doubleEvalFunc(dV));
  1310. Result = dResult;
  1311. } else {
  1312. DXASSERT_NOMSG(Ty->isFloatTy());
  1313. float fV = fpV->getValueAPF().convertToFloat();
  1314. Value *dResult = ConstantFP::get(Ty, floatEvalFunc(fV));
  1315. Result = dResult;
  1316. }
  1317. return Result;
  1318. }
// Constant-folds one scalar binary operation. Dispatches on cV0's type:
// double, float, or integer. intEvalFunc must be non-null when the type
// is integer. Returns the folded constant.
Value *EvalBinaryIntrinsic(Constant *cV0, Constant *cV1,
                           FloatBinaryEvalFuncType floatEvalFunc,
                           DoubleBinaryEvalFuncType doubleEvalFunc,
                           IntBinaryEvalFuncType intEvalFunc) {
  llvm::Type *Ty = cV0->getType();
  Value *Result = nullptr;
  if (Ty->isDoubleTy()) {
    ConstantFP *fpV0 = cast<ConstantFP>(cV0);
    ConstantFP *fpV1 = cast<ConstantFP>(cV1);
    double dV0 = fpV0->getValueAPF().convertToDouble();
    double dV1 = fpV1->getValueAPF().convertToDouble();
    Value *dResult = ConstantFP::get(Ty, doubleEvalFunc(dV0, dV1));
    Result = dResult;
  } else if (Ty->isFloatTy()) {
    ConstantFP *fpV0 = cast<ConstantFP>(cV0);
    ConstantFP *fpV1 = cast<ConstantFP>(cV1);
    float fV0 = fpV0->getValueAPF().convertToFloat();
    float fV1 = fpV1->getValueAPF().convertToFloat();
    Value *dResult = ConstantFP::get(Ty, floatEvalFunc(fV0, fV1));
    Result = dResult;
  } else {
    // Integer path requires a callback; asserts guard misuse.
    DXASSERT_NOMSG(Ty->isIntegerTy());
    DXASSERT_NOMSG(intEvalFunc);
    ConstantInt *ciV0 = cast<ConstantInt>(cV0);
    ConstantInt *ciV1 = cast<ConstantInt>(cV1);
    const APInt &iV0 = ciV0->getValue();
    const APInt &iV1 = ciV1->getValue();
    Value *dResult = ConstantInt::get(Ty, intEvalFunc(iV0, iV1));
    Result = dResult;
  }
  return Result;
}
// Constant-folds one scalar ternary operation. Dispatches on cV0's type:
// double, float, or integer. intEvalFunc must be non-null when the type
// is integer. Returns the folded constant.
Value *EvalTernaryIntrinsic(Constant *cV0, Constant *cV1, Constant *cV2,
                            FloatTernaryEvalFuncType floatEvalFunc,
                            DoubleTernaryEvalFuncType doubleEvalFunc,
                            IntTernaryEvalFuncType intEvalFunc) {
  llvm::Type *Ty = cV0->getType();
  Value *Result = nullptr;
  if (Ty->isDoubleTy()) {
    ConstantFP *fpV0 = cast<ConstantFP>(cV0);
    ConstantFP *fpV1 = cast<ConstantFP>(cV1);
    ConstantFP *fpV2 = cast<ConstantFP>(cV2);
    double dV0 = fpV0->getValueAPF().convertToDouble();
    double dV1 = fpV1->getValueAPF().convertToDouble();
    double dV2 = fpV2->getValueAPF().convertToDouble();
    Value *dResult = ConstantFP::get(Ty, doubleEvalFunc(dV0, dV1, dV2));
    Result = dResult;
  } else if (Ty->isFloatTy()) {
    ConstantFP *fpV0 = cast<ConstantFP>(cV0);
    ConstantFP *fpV1 = cast<ConstantFP>(cV1);
    ConstantFP *fpV2 = cast<ConstantFP>(cV2);
    float fV0 = fpV0->getValueAPF().convertToFloat();
    float fV1 = fpV1->getValueAPF().convertToFloat();
    float fV2 = fpV2->getValueAPF().convertToFloat();
    Value *dResult = ConstantFP::get(Ty, floatEvalFunc(fV0, fV1, fV2));
    Result = dResult;
  } else {
    // Integer path requires a callback; asserts guard misuse.
    DXASSERT_NOMSG(Ty->isIntegerTy());
    DXASSERT_NOMSG(intEvalFunc);
    ConstantInt *ciV0 = cast<ConstantInt>(cV0);
    ConstantInt *ciV1 = cast<ConstantInt>(cV1);
    ConstantInt *ciV2 = cast<ConstantInt>(cV2);
    const APInt &iV0 = ciV0->getValue();
    const APInt &iV1 = ciV1->getValue();
    const APInt &iV2 = ciV2->getValue();
    Value *dResult = ConstantInt::get(Ty, intEvalFunc(iV0, iV1, iV2));
    Result = dResult;
  }
  return Result;
}
// Constant-folds a unary intrinsic call. Vector arguments are folded
// element-wise (each element must be a ConstantFP). On success the call
// is replaced with the folded value and erased; the value is returned.
Value *EvalUnaryIntrinsic(CallInst *CI, FloatUnaryEvalFuncType floatEvalFunc,
                          DoubleUnaryEvalFuncType doubleEvalFunc) {
  Value *V = CI->getArgOperand(0);
  llvm::Type *Ty = CI->getType();
  Value *Result = nullptr;
  if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(Ty)) {
    // Build the result vector by folding and inserting each lane.
    Result = UndefValue::get(Ty);
    Constant *CV = cast<Constant>(V);
    IRBuilder<> Builder(CI);
    for (unsigned i = 0; i < VT->getNumElements(); i++) {
      ConstantFP *fpV = cast<ConstantFP>(CV->getAggregateElement(i));
      Value *EltResult = EvalUnaryIntrinsic(fpV, floatEvalFunc, doubleEvalFunc);
      Result = Builder.CreateInsertElement(Result, EltResult, i);
    }
  } else {
    ConstantFP *fpV = cast<ConstantFP>(V);
    Result = EvalUnaryIntrinsic(fpV, floatEvalFunc, doubleEvalFunc);
  }
  CI->replaceAllUsesWith(Result);
  CI->eraseFromParent();
  return Result;
}
  1411. Value *EvalBinaryIntrinsic(CallInst *CI, FloatBinaryEvalFuncType floatEvalFunc,
  1412. DoubleBinaryEvalFuncType doubleEvalFunc,
  1413. IntBinaryEvalFuncType intEvalFunc = nullptr) {
  1414. Value *V0 = CI->getArgOperand(0);
  1415. Value *V1 = CI->getArgOperand(1);
  1416. llvm::Type *Ty = CI->getType();
  1417. Value *Result = nullptr;
  1418. if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(Ty)) {
  1419. Result = UndefValue::get(Ty);
  1420. Constant *CV0 = cast<Constant>(V0);
  1421. Constant *CV1 = cast<Constant>(V1);
  1422. IRBuilder<> Builder(CI);
  1423. for (unsigned i = 0; i < VT->getNumElements(); i++) {
  1424. Constant *cV0 = cast<Constant>(CV0->getAggregateElement(i));
  1425. Constant *cV1 = cast<Constant>(CV1->getAggregateElement(i));
  1426. Value *EltResult = EvalBinaryIntrinsic(cV0, cV1, floatEvalFunc,
  1427. doubleEvalFunc, intEvalFunc);
  1428. Result = Builder.CreateInsertElement(Result, EltResult, i);
  1429. }
  1430. } else {
  1431. Constant *cV0 = cast<Constant>(V0);
  1432. Constant *cV1 = cast<Constant>(V1);
  1433. Result = EvalBinaryIntrinsic(cV0, cV1, floatEvalFunc, doubleEvalFunc,
  1434. intEvalFunc);
  1435. }
  1436. CI->replaceAllUsesWith(Result);
  1437. CI->eraseFromParent();
  1438. return Result;
  1439. CI->eraseFromParent();
  1440. return Result;
  1441. }
  1442. Value *EvalTernaryIntrinsic(CallInst *CI, FloatTernaryEvalFuncType floatEvalFunc,
  1443. DoubleTernaryEvalFuncType doubleEvalFunc,
  1444. IntTernaryEvalFuncType intEvalFunc = nullptr) {
  1445. Value *V0 = CI->getArgOperand(0);
  1446. Value *V1 = CI->getArgOperand(1);
  1447. Value *V2 = CI->getArgOperand(2);
  1448. llvm::Type *Ty = CI->getType();
  1449. Value *Result = nullptr;
  1450. if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(Ty)) {
  1451. Result = UndefValue::get(Ty);
  1452. Constant *CV0 = cast<Constant>(V0);
  1453. Constant *CV1 = cast<Constant>(V1);
  1454. Constant *CV2 = cast<Constant>(V2);
  1455. IRBuilder<> Builder(CI);
  1456. for (unsigned i = 0; i < VT->getNumElements(); i++) {
  1457. Constant *cV0 = cast<Constant>(CV0->getAggregateElement(i));
  1458. Constant *cV1 = cast<Constant>(CV1->getAggregateElement(i));
  1459. Constant *cV2 = cast<Constant>(CV2->getAggregateElement(i));
  1460. Value *EltResult = EvalTernaryIntrinsic(cV0, cV1, cV2, floatEvalFunc,
  1461. doubleEvalFunc, intEvalFunc);
  1462. Result = Builder.CreateInsertElement(Result, EltResult, i);
  1463. }
  1464. } else {
  1465. Constant *cV0 = cast<Constant>(V0);
  1466. Constant *cV1 = cast<Constant>(V1);
  1467. Constant *cV2 = cast<Constant>(V2);
  1468. Result = EvalTernaryIntrinsic(cV0, cV1, cV2, floatEvalFunc, doubleEvalFunc,
  1469. intEvalFunc);
  1470. }
  1471. CI->replaceAllUsesWith(Result);
  1472. CI->eraseFromParent();
  1473. return Result;
  1474. CI->eraseFromParent();
  1475. return Result;
  1476. }
// Applies small per-instruction rewrites that make later HL lowering
// easier: simplifies pointer bitcasts (standalone or as constant-
// expression addresses of loads/stores) and clamps shift amounts to the
// operand bit width. Dead instructions are collected in deadInsts.
void SimpleTransformForHLDXIRInst(Instruction *I, SmallInstSet &deadInsts) {
  unsigned opcode = I->getOpcode();
  switch (opcode) {
  case Instruction::BitCast: {
    BitCastOperator *BCI = cast<BitCastOperator>(I);
    SimplifyBitCast(BCI, deadInsts);
  } break;
  case Instruction::Load: {
    LoadInst *ldInst = cast<LoadInst>(I);
    DXASSERT(!HLMatrixType::isa(ldInst->getType()),
             "matrix load should use HL LdStMatrix");
    Value *Ptr = ldInst->getPointerOperand();
    // The address itself may be a constant-expression bitcast.
    if (ConstantExpr *CE = dyn_cast_or_null<ConstantExpr>(Ptr)) {
      if (BitCastOperator *BCO = dyn_cast<BitCastOperator>(CE)) {
        SimplifyBitCast(BCO, deadInsts);
      }
    }
  } break;
  case Instruction::Store: {
    StoreInst *stInst = cast<StoreInst>(I);
    Value *V = stInst->getValueOperand();
    DXASSERT_LOCALVAR(V, !HLMatrixType::isa(V->getType()),
                      "matrix store should use HL LdStMatrix");
    Value *Ptr = stInst->getPointerOperand();
    // The address itself may be a constant-expression bitcast.
    if (ConstantExpr *CE = dyn_cast<ConstantExpr>(Ptr)) {
      if (BitCastOperator *BCO = dyn_cast<BitCastOperator>(CE)) {
        SimplifyBitCast(BCO, deadInsts);
      }
    }
  } break;
  case Instruction::LShr:
  case Instruction::AShr:
  case Instruction::Shl: {
    llvm::BinaryOperator *BO = cast<llvm::BinaryOperator>(I);
    Value *op2 = BO->getOperand(1);
    IntegerType *Ty = cast<IntegerType>(BO->getType()->getScalarType());
    unsigned bitWidth = Ty->getBitWidth();
    // Clamp op2 to 0 ~ bitWidth-1
    // (masking assumes bitWidth is a power of two).
    if (ConstantInt *cOp2 = dyn_cast<ConstantInt>(op2)) {
      // Constant shift amount: fold the mask at compile time.
      unsigned iOp2 = cOp2->getLimitedValue();
      unsigned clampedOp2 = iOp2 & (bitWidth - 1);
      if (iOp2 != clampedOp2) {
        BO->setOperand(1, ConstantInt::get(op2->getType(), clampedOp2));
      }
    } else {
      // Dynamic shift amount: emit an explicit `and` with the mask.
      Value *mask = ConstantInt::get(op2->getType(), bitWidth - 1);
      IRBuilder<> Builder(I);
      op2 = Builder.CreateAnd(op2, mask);
      BO->setOperand(1, op2);
    }
  } break;
  }
}
  1530. } // namespace
  1531. namespace CGHLSLMSHelper {
  1532. Value *TryEvalIntrinsic(CallInst *CI, IntrinsicOp intriOp,
  1533. unsigned hlslVersion) {
  1534. switch (intriOp) {
  1535. case IntrinsicOp::IOP_tan: {
  1536. return EvalUnaryIntrinsic(CI, tanf, tan);
  1537. } break;
  1538. case IntrinsicOp::IOP_tanh: {
  1539. return EvalUnaryIntrinsic(CI, tanhf, tanh);
  1540. } break;
  1541. case IntrinsicOp::IOP_sin: {
  1542. return EvalUnaryIntrinsic(CI, sinf, sin);
  1543. } break;
  1544. case IntrinsicOp::IOP_sinh: {
  1545. return EvalUnaryIntrinsic(CI, sinhf, sinh);
  1546. } break;
  1547. case IntrinsicOp::IOP_cos: {
  1548. return EvalUnaryIntrinsic(CI, cosf, cos);
  1549. } break;
  1550. case IntrinsicOp::IOP_cosh: {
  1551. return EvalUnaryIntrinsic(CI, coshf, cosh);
  1552. } break;
  1553. case IntrinsicOp::IOP_asin: {
  1554. return EvalUnaryIntrinsic(CI, asinf, asin);
  1555. } break;
  1556. case IntrinsicOp::IOP_acos: {
  1557. return EvalUnaryIntrinsic(CI, acosf, acos);
  1558. } break;
  1559. case IntrinsicOp::IOP_atan: {
  1560. return EvalUnaryIntrinsic(CI, atanf, atan);
  1561. } break;
  1562. case IntrinsicOp::IOP_atan2: {
  1563. Value *V0 = CI->getArgOperand(0);
  1564. ConstantFP *fpV0 = cast<ConstantFP>(V0);
  1565. Value *V1 = CI->getArgOperand(1);
  1566. ConstantFP *fpV1 = cast<ConstantFP>(V1);
  1567. llvm::Type *Ty = CI->getType();
  1568. Value *Result = nullptr;
  1569. if (Ty->isDoubleTy()) {
  1570. double dV0 = fpV0->getValueAPF().convertToDouble();
  1571. double dV1 = fpV1->getValueAPF().convertToDouble();
  1572. Value *atanV = ConstantFP::get(CI->getType(), atan2(dV0, dV1));
  1573. CI->replaceAllUsesWith(atanV);
  1574. Result = atanV;
  1575. } else {
  1576. DXASSERT_NOMSG(Ty->isFloatTy());
  1577. float fV0 = fpV0->getValueAPF().convertToFloat();
  1578. float fV1 = fpV1->getValueAPF().convertToFloat();
  1579. Value *atanV = ConstantFP::get(CI->getType(), atan2f(fV0, fV1));
  1580. CI->replaceAllUsesWith(atanV);
  1581. Result = atanV;
  1582. }
  1583. CI->eraseFromParent();
  1584. return Result;
  1585. } break;
  1586. case IntrinsicOp::IOP_sqrt: {
  1587. return EvalUnaryIntrinsic(CI, sqrtf, sqrt);
  1588. } break;
  1589. case IntrinsicOp::IOP_rsqrt: {
  1590. auto rsqrtF = [](float v) -> float { return 1.0 / sqrtf(v); };
  1591. auto rsqrtD = [](double v) -> double { return 1.0 / sqrt(v); };
  1592. return EvalUnaryIntrinsic(CI, rsqrtF, rsqrtD);
  1593. } break;
  1594. case IntrinsicOp::IOP_exp: {
  1595. return EvalUnaryIntrinsic(CI, expf, exp);
  1596. } break;
  1597. case IntrinsicOp::IOP_exp2: {
  1598. return EvalUnaryIntrinsic(CI, exp2f, exp2);
  1599. } break;
  1600. case IntrinsicOp::IOP_log: {
  1601. return EvalUnaryIntrinsic(CI, logf, log);
  1602. } break;
  1603. case IntrinsicOp::IOP_log10: {
  1604. return EvalUnaryIntrinsic(CI, log10f, log10);
  1605. } break;
  1606. case IntrinsicOp::IOP_log2: {
  1607. return EvalUnaryIntrinsic(CI, log2f, log2);
  1608. } break;
  1609. case IntrinsicOp::IOP_pow: {
  1610. return EvalBinaryIntrinsic(CI, powf, pow);
  1611. } break;
  1612. case IntrinsicOp::IOP_max: {
  1613. auto maxF = [](float a, float b) -> float { return a > b ? a : b; };
  1614. auto maxD = [](double a, double b) -> double { return a > b ? a : b; };
  1615. auto imaxI = [](const APInt &a, const APInt &b) -> APInt {
  1616. return a.sgt(b) ? a : b;
  1617. };
  1618. return EvalBinaryIntrinsic(CI, maxF, maxD, imaxI);
  1619. } break;
  1620. case IntrinsicOp::IOP_min: {
  1621. auto minF = [](float a, float b) -> float { return a < b ? a : b; };
  1622. auto minD = [](double a, double b) -> double { return a < b ? a : b; };
  1623. auto iminI = [](const APInt &a, const APInt &b) -> APInt {
  1624. return a.slt(b) ? a : b;
  1625. };
  1626. return EvalBinaryIntrinsic(CI, minF, minD, iminI);
  1627. } break;
  1628. case IntrinsicOp::IOP_umax: {
  1629. DXASSERT_NOMSG(
  1630. CI->getArgOperand(0)->getType()->getScalarType()->isIntegerTy());
  1631. auto umaxI = [](const APInt &a, const APInt &b) -> APInt {
  1632. return a.ugt(b) ? a : b;
  1633. };
  1634. return EvalBinaryIntrinsic(CI, nullptr, nullptr, umaxI);
  1635. } break;
  1636. case IntrinsicOp::IOP_umin: {
  1637. DXASSERT_NOMSG(
  1638. CI->getArgOperand(0)->getType()->getScalarType()->isIntegerTy());
  1639. auto uminI = [](const APInt &a, const APInt &b) -> APInt {
  1640. return a.ult(b) ? a : b;
  1641. };
  1642. return EvalBinaryIntrinsic(CI, nullptr, nullptr, uminI);
  1643. } break;
  1644. case IntrinsicOp::IOP_rcp: {
  1645. auto rcpF = [](float v) -> float { return 1.0 / v; };
  1646. auto rcpD = [](double v) -> double { return 1.0 / v; };
  1647. return EvalUnaryIntrinsic(CI, rcpF, rcpD);
  1648. } break;
  1649. case IntrinsicOp::IOP_ceil: {
  1650. return EvalUnaryIntrinsic(CI, ceilf, ceil);
  1651. } break;
  1652. case IntrinsicOp::IOP_floor: {
  1653. return EvalUnaryIntrinsic(CI, floorf, floor);
  1654. } break;
  1655. case IntrinsicOp::IOP_round: {
  1656. // round intrinsic could exhibit different behaviour for constant and
  1657. // runtime evaluations. E.g., for round(0.5): constant evaluation results in
  1658. // 1 (away from zero rounding), while runtime evaluation results in 0
  1659. // (nearest even rounding).
  1660. //
  1661. // For back compat, DXC still preserves the above behavior for language
  1662. // versions 2016 or below. However, for newer language versions, DXC now
  1663. // always use nearest even for round() intrinsic in all cases.
  1664. if (hlslVersion <= 2016) {
  1665. return EvalUnaryIntrinsic(CI, roundf, round);
  1666. } else {
  1667. auto roundingMode = fegetround();
  1668. fesetround(FE_TONEAREST);
  1669. Value *result = EvalUnaryIntrinsic(CI, nearbyintf, nearbyint);
  1670. fesetround(roundingMode);
  1671. return result;
  1672. }
  1673. } break;
  1674. case IntrinsicOp::IOP_trunc: {
  1675. return EvalUnaryIntrinsic(CI, truncf, trunc);
  1676. } break;
  1677. case IntrinsicOp::IOP_frac: {
  1678. auto fracF = [](float v) -> float { return v - floor(v); };
  1679. auto fracD = [](double v) -> double { return v - floor(v); };
  1680. return EvalUnaryIntrinsic(CI, fracF, fracD);
  1681. } break;
  1682. case IntrinsicOp::IOP_isnan: {
  1683. Value *V = CI->getArgOperand(0);
  1684. ConstantFP *fV = cast<ConstantFP>(V);
  1685. bool isNan = fV->getValueAPF().isNaN();
  1686. Constant *cNan = ConstantInt::get(CI->getType(), isNan ? 1 : 0);
  1687. CI->replaceAllUsesWith(cNan);
  1688. CI->eraseFromParent();
  1689. return cNan;
  1690. } break;
  1691. case IntrinsicOp::IOP_clamp: {
  1692. auto clampF = [](float a, float b, float c) {
  1693. return a < b ? b : a > c ? c : a;
  1694. };
  1695. auto clampD = [](double a, double b, double c) {
  1696. return a < b ? b : a > c ? c : a;
  1697. };
  1698. auto clampI = [](const APInt &a, const APInt &b, const APInt &c) -> APInt {
  1699. return a.slt(b) ? b : a.sgt(c) ? c : a;
  1700. };
  1701. return EvalTernaryIntrinsic(CI, clampF, clampD, clampI);
  1702. } break;
  1703. default:
  1704. return nullptr;
  1705. }
  1706. }
// Do simple transform to make later lower pass easier.
// Walks every instruction in the module, applies
// SimpleTransformForHLDXIRInst, erases the collected dead instructions,
// then repeats the bitcast simplification for static globals used via
// constant-expression bitcasts.
void SimpleTransformForHLDXIR(llvm::Module *pM) {
  SmallInstSet deadInsts;
  for (Function &F : pM->functions()) {
    for (BasicBlock &BB : F.getBasicBlockList()) {
      for (BasicBlock::iterator Iter = BB.begin(); Iter != BB.end();) {
        // Advance the iterator before transforming, since I may be
        // scheduled for deletion.
        Instruction *I = (Iter++);
        if (deadInsts.count(I))
          continue; // Skip dead instructions
        SimpleTransformForHLDXIRInst(I, deadInsts);
      }
    }
  }
  // Drop all references first so mutually-referencing dead instructions
  // can then be erased safely.
  for (Instruction *I : deadInsts)
    I->dropAllReferences();
  for (Instruction *I : deadInsts)
    I->eraseFromParent();
  deadInsts.clear();
  // Second pass: bitcasts of static globals appearing as constant
  // expressions rather than instructions.
  for (GlobalVariable &GV : pM->globals()) {
    if (dxilutil::IsStaticGlobal(&GV)) {
      for (User *U : GV.users()) {
        if (BitCastOperator *BCO = dyn_cast<BitCastOperator>(U)) {
          SimplifyBitCast(BCO, deadInsts);
        }
      }
    }
  }
  for (Instruction *I : deadInsts)
    I->dropAllReferences();
  for (Instruction *I : deadInsts)
    I->eraseFromParent();
}
  1739. } // namespace CGHLSLMSHelper
  1740. namespace {
  1741. unsigned RoundToAlign(unsigned num, unsigned mod) {
  1742. // round num to next highest mod
  1743. if (mod != 0)
  1744. return mod * ((num + mod - 1) / mod);
  1745. return num;
  1746. }
  1747. // Retrieve the last scalar or vector element type.
  1748. // This has to be recursive for the nasty empty struct case.
  1749. // returns true if found, false if we must backtrack.
  1750. bool RetrieveLastElementType(Type *Ty, Type *&EltTy) {
  1751. if (Ty->isStructTy()) {
  1752. if (Ty->getStructNumElements() == 0)
  1753. return false;
  1754. for (unsigned i = Ty->getStructNumElements(); i > 0; --i) {
  1755. if (RetrieveLastElementType(Ty->getStructElementType(i - 1), EltTy))
  1756. return true;
  1757. }
  1758. } else if (Ty->isArrayTy()) {
  1759. if (RetrieveLastElementType(Ty->getArrayElementType(), EltTy))
  1760. return true;
  1761. } else if ((Ty->isVectorTy() || Ty->isSingleValueType())) {
  1762. EltTy = Ty->getScalarType();
  1763. return true;
  1764. }
  1765. return false;
  1766. }
// Here the size is CB size.
// Offset still needs to be aligned based on type since this
// is the legacy cbuffer global path.
// Returns the aligned offset for a constant of type Ty placed after
// `offset`. bCurRowIsMinPrec is in/out state tracking whether the current
// 16-byte row holds min-precision components; in min-precision mode a
// transition into or out of min-precision forces a new row.
unsigned AlignCBufferOffset(unsigned offset, unsigned size, llvm::Type *Ty,
                            bool bRowMajor, bool bMinPrecMode,
                            bool &bCurRowIsMinPrec) {
  DXASSERT(!(offset & 1), "otherwise we have an invalid offset.");
  // Arrays always start on a fresh row.
  bool bNeedNewRow = Ty->isArrayTy();
  // In min-precision mode, a new row is needed when
  // going into or out of min-precision component type.
  if (!bNeedNewRow) {
    bool bMinPrec = false;
    if (Ty->isStructTy()) {
      if (HLMatrixType mat = HLMatrixType::dyn_cast(Ty)) {
        // Matrices need a new row when they span multiple registers in
        // their storage orientation.
        bNeedNewRow |= !bRowMajor && mat.getNumColumns() > 1;
        bNeedNewRow |= bRowMajor && mat.getNumRows() > 1;
        bMinPrec = bMinPrecMode &&
                   mat.getElementType(false)->getScalarSizeInBits() < 32;
      } else {
        bNeedNewRow = true;
        if (bMinPrecMode) {
          // Need to get min-prec of last element of structure,
          // in case we pack something else into the end.
          Type *EltTy = nullptr;
          if (RetrieveLastElementType(Ty, EltTy))
            bCurRowIsMinPrec = EltTy->getScalarSizeInBits() < 32;
        }
      }
    } else {
      DXASSERT_NOMSG(Ty->isVectorTy() || Ty->isSingleValueType());
      // vector or scalar
      bMinPrec = bMinPrecMode && Ty->getScalarSizeInBits() < 32;
    }
    if (bMinPrecMode) {
      // Force a new row on min-precision transitions, then record the
      // precision class of the row we are now filling.
      bNeedNewRow |= bCurRowIsMinPrec != bMinPrec;
      bCurRowIsMinPrec = bMinPrec;
    }
  }
  unsigned scalarSizeInBytes = Ty->getScalarSizeInBits() / 8;
  return AlignBufferOffsetInLegacy(offset, size, scalarSizeInBytes,
                                   bNeedNewRow);
}
  1809. unsigned AllocateDxilConstantBuffer(
  1810. HLCBuffer &CB,
  1811. std::unordered_map<Constant *, DxilFieldAnnotation> &constVarAnnotationMap,
  1812. bool bMinPrecMode) {
  1813. unsigned offset = 0;
  1814. // Scan user allocated constants first.
  1815. // Update offset.
  1816. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  1817. if (C->GetLowerBound() == UINT_MAX)
  1818. continue;
  1819. unsigned size = C->GetRangeSize();
  1820. unsigned nextOffset = size + C->GetLowerBound();
  1821. if (offset < nextOffset)
  1822. offset = nextOffset;
  1823. }
  1824. // Alloc after user allocated constants.
  1825. bool bCurRowIsMinPrec = false;
  1826. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  1827. if (C->GetLowerBound() != UINT_MAX)
  1828. continue;
  1829. unsigned size = C->GetRangeSize();
  1830. llvm::Type *Ty = C->GetHLSLType()->getPointerElementType();
  1831. auto fieldAnnotation = constVarAnnotationMap.at(C->GetGlobalSymbol());
  1832. bool bRowMajor = HLMatrixType::isa(Ty)
  1833. ? fieldAnnotation.GetMatrixAnnotation().Orientation ==
  1834. MatrixOrientation::RowMajor
  1835. : false;
  1836. // Align offset.
  1837. offset = AlignCBufferOffset(offset, size, Ty, bRowMajor, bMinPrecMode,
  1838. bCurRowIsMinPrec);
  1839. if (C->GetLowerBound() == UINT_MAX) {
  1840. C->SetLowerBound(offset);
  1841. }
  1842. offset += size;
  1843. }
  1844. return offset;
  1845. }
  1846. void AllocateDxilConstantBuffers(
  1847. HLModule &HLM, std::unordered_map<Constant *, DxilFieldAnnotation>
  1848. &constVarAnnotationMap) {
  1849. for (unsigned i = 0; i < HLM.GetCBuffers().size(); i++) {
  1850. HLCBuffer &CB = *static_cast<HLCBuffer *>(&(HLM.GetCBuffer(i)));
  1851. unsigned size = AllocateDxilConstantBuffer(
  1852. CB, constVarAnnotationMap, HLM.GetHLOptions().bUseMinPrecision);
  1853. CB.SetSize(size);
  1854. }
  1855. }
  1856. } // namespace
  1857. namespace {
// Replaces uses of V with NewV, but only for uses reachable from function
// F. Instruction users inside F are rewritten in place; constant-operator
// users (GEP/bitcast) are cloned locally via Builder and recursed into so
// their function-local uses can be redirected.
void ReplaceUseInFunction(Value *V, Value *NewV, Function *F,
                          IRBuilder<> &Builder) {
  // Advance the iterator before mutating, since rewriting may remove the
  // current use from V's use list.
  for (auto U = V->user_begin(); U != V->user_end();) {
    User *user = *(U++);
    if (Instruction *I = dyn_cast<Instruction>(user)) {
      if (I->getParent()->getParent() == F) {
        // replace use with GEP if in F
        if (BitCastInst *BCI = dyn_cast<BitCastInst>(I)) {
          // A bitcast to NewV's exact type becomes redundant: forward its
          // uses straight to NewV and delete it.
          if (BCI->getType() == NewV->getType()) {
            I->replaceAllUsesWith(NewV);
            I->eraseFromParent();
            continue;
          }
        }
        I->replaceUsesOfWith(V, NewV);
      }
    } else {
      // For constant operator, create local clone which use GEP.
      // Only support GEP and bitcast.
      if (GEPOperator *GEPOp = dyn_cast<GEPOperator>(user)) {
        std::vector<Value *> idxList(GEPOp->idx_begin(), GEPOp->idx_end());
        Value *NewGEP = Builder.CreateInBoundsGEP(NewV, idxList);
        ReplaceUseInFunction(GEPOp, NewGEP, F, Builder);
      } else if (GlobalVariable *GV = dyn_cast<GlobalVariable>(user)) {
        // Change the init val into NewV with Store.
        GV->setInitializer(nullptr);
        Builder.CreateStore(NewV, GV);
      } else {
        // Must be bitcast here.
        BitCastOperator *BC = cast<BitCastOperator>(user);
        Value *NewBC = Builder.CreateBitCast(NewV, BC->getType());
        ReplaceUseInFunction(BC, NewBC, F, Builder);
      }
    }
  }
}
  1894. void MarkUsedFunctionForConst(Value *V,
  1895. std::unordered_set<Function *> &usedFunc) {
  1896. for (auto U = V->user_begin(); U != V->user_end();) {
  1897. User *user = *(U++);
  1898. if (Instruction *I = dyn_cast<Instruction>(user)) {
  1899. Function *F = I->getParent()->getParent();
  1900. usedFunc.insert(F);
  1901. } else {
  1902. // For constant operator, create local clone which use GEP.
  1903. // Only support GEP and bitcast.
  1904. if (GEPOperator *GEPOp = dyn_cast<GEPOperator>(user)) {
  1905. MarkUsedFunctionForConst(GEPOp, usedFunc);
  1906. } else if (GlobalVariable *GV = dyn_cast<GlobalVariable>(user)) {
  1907. MarkUsedFunctionForConst(GV, usedFunc);
  1908. } else {
  1909. // Must be bitcast here.
  1910. BitCastOperator *BC = cast<BitCastOperator>(user);
  1911. MarkUsedFunctionForConst(BC, usedFunc);
  1912. }
  1913. }
  1914. }
  1915. }
// Materializes the LLVM-level representation of one cbuffer (or
// ConstantBuffer<T>, possibly an array of them): creates the backing global
// variable, then rewrites every use of each member constant in every
// user-defined function so access goes through
// createHandle -> annotateHandle -> HLSubscript(CBufferSubscript) -> GEP.
// Returns false (creating nothing) when no member of the cbuffer is used.
bool CreateCBufferVariable(HLCBuffer &CB, HLModule &HLM, llvm::Type *HandleTy) {
  bool bUsed = false;
  // Build the member-type list for the cbuffer struct, noting whether any
  // member is actually referenced.
  SmallVector<llvm::Type *, 4> Elements;
  for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
    Value *GV = C->GetGlobalSymbol();
    if (!GV->use_empty())
      bUsed = true;
    // Global variable must be pointer type; strip it to get the value type.
    llvm::Type *Ty = C->GetHLSLType()->getPointerElementType();
    Elements.emplace_back(Ty);
  }
  // Don't create CBuffer variable for unused cbuffer.
  if (!bUsed)
    return false;
  llvm::Module &M = *HLM.GetModule();
  bool isCBArray = CB.IsArray();
  llvm::GlobalVariable *cbGV = nullptr;
  // Type returned by the subscript operator (a pointer into the cbuffer).
  llvm::Type *cbTy = nullptr;
  // Number of array dimensions of the ConstantBuffer element type; used to
  // split user GEP indices into resource index vs. member indices below.
  unsigned cbIndexDepth = 0;
  if (!isCBArray) {
    if (CB.IsView()) {
      // ConstantBuffer<T> view: the struct layout comes from the result type.
      llvm::StructType *CBStructTy =
          llvm::StructType::create(CB.GetResultType(), CB.GetGlobalName());
      cbGV = new llvm::GlobalVariable(M, CBStructTy,
                                      /*IsConstant*/ true,
                                      llvm::GlobalValue::ExternalLinkage,
                                      /*InitVal*/ nullptr, CB.GetGlobalName());
      cbTy = cbGV->getType();
    } else {
      // Classic cbuffer: struct built from the member types gathered above.
      llvm::StructType *CBStructTy =
          llvm::StructType::create(Elements, CB.GetGlobalName());
      cbGV = new llvm::GlobalVariable(M, CBStructTy, /*IsConstant*/ true,
                                      llvm::GlobalValue::ExternalLinkage,
                                      /*InitVal*/ nullptr, CB.GetGlobalName());
      cbTy = cbGV->getType();
    }
  } else {
    // For array of ConstantBuffer, create array of struct instead of struct of
    // array.
    DXASSERT(CB.GetConstants().size() == 1,
             "ConstantBuffer should have 1 constant");
    llvm::Type *CBEltTy =
        CB.GetConstants()[0]->GetHLSLType()->getPointerElementType()->getArrayElementType();
    // Count nested array dimensions of the element type.
    cbIndexDepth = 1;
    while (CBEltTy->isArrayTy()) {
      CBEltTy = CBEltTy->getArrayElementType();
      cbIndexDepth++;
    }
    // Add one level struct type to match normal case.
    llvm::StructType *CBStructTy =
        llvm::StructType::create({CB.GetResultType()}, CB.GetGlobalName());
    llvm::ArrayType *CBArrayTy =
        llvm::ArrayType::get(CBStructTy, CB.GetRangeSize());
    cbGV = new llvm::GlobalVariable(M, CBArrayTy, /*IsConstant*/ true,
                                    llvm::GlobalValue::ExternalLinkage,
                                    /*InitVal*/ nullptr, CB.GetGlobalName());
    // Subscript yields a pointer to one struct element of the array.
    cbTy = llvm::PointerType::get(CBStructTy,
                                  cbGV->getType()->getPointerAddressSpace());
  }
  CB.SetGlobalSymbol(cbGV);
  llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  llvm::Type *idxTy = opcodeTy;
  Constant *zeroIdx = ConstantInt::get(opcodeTy, 0);
  // Shared argument buffer for createHandle calls; the resource and index
  // slots are overwritten per call site below.
  Value *HandleArgs[] = {cbGV, zeroIdx};
  llvm::FunctionType *SubscriptFuncTy =
      llvm::FunctionType::get(cbTy, {opcodeTy, HandleTy, idxTy}, false);
  Function *subscriptFunc =
      GetOrCreateHLFunction(M, SubscriptFuncTy, HLOpcodeGroup::HLSubscript,
                            (unsigned)HLSubscriptOpcode::CBufferSubscript);
  Constant *opArg =
      ConstantInt::get(opcodeTy, (unsigned)HLSubscriptOpcode::CBufferSubscript);
  // Shared argument buffer for subscript calls; handle/index filled in later.
  Value *args[] = {opArg, nullptr, zeroIdx};
  llvm::LLVMContext &Context = M.getContext();
  llvm::Type *i32Ty = llvm::Type::getInt32Ty(Context);
  Value *zero = ConstantInt::get(i32Ty, (uint64_t)0);
  // Precompute, per member: its struct-field index constant and the set of
  // functions that reference it (so we only emit GEPs where needed).
  std::vector<Value *> indexArray(CB.GetConstants().size());
  std::vector<std::unordered_set<Function *>> constUsedFuncList(
      CB.GetConstants().size());
  for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
    Value *idx = ConstantInt::get(i32Ty, C->GetID());
    indexArray[C->GetID()] = idx;
    Value *GV = C->GetGlobalSymbol();
    MarkUsedFunctionForConst(GV, constUsedFuncList[C->GetID()]);
  }
  for (Function &F : M.functions()) {
    if (F.isDeclaration())
      continue;
    if (GetHLOpcodeGroupByName(&F) != HLOpcodeGroup::NotHL)
      continue;
    IRBuilder<> Builder(F.getEntryBlock().getFirstInsertionPt());
    // create HL subscript to make all the use of cbuffer start from it.
    HandleArgs[HLOperandIndex::kCreateHandleResourceOpIdx - 1] = cbGV;
    CallInst *Handle = HLM.EmitHLOperationCall(
        Builder, HLOpcodeGroup::HLCreateHandle, 0, HandleTy, HandleArgs, M);
    // Keep the raw handle so both calls can be erased if unused.
    CallInst *OrigHandle = Handle;
    DxilResourceProperties RP = resource_helper::loadPropsFromResourceBase(&CB);
    Handle = CreateAnnotateHandle(HLM, Handle, RP, cbGV->getType()->getElementType(), Builder);
    args[HLOperandIndex::kSubscriptObjectOpIdx] = Handle;
    Instruction *cbSubscript =
        cast<Instruction>(Builder.CreateCall(subscriptFunc, {args}));
    // Replace each constant's uses in F with a GEP off the subscript result.
    for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
      Value *GV = C->GetGlobalSymbol();
      if (constUsedFuncList[C->GetID()].count(&F) == 0)
        continue;
      Value *idx = indexArray[C->GetID()];
      if (!isCBArray) {
        Instruction *GEP = cast<Instruction>(
            Builder.CreateInBoundsGEP(cbSubscript, {zero, idx}));
        // TODO: make sure the debug info is synced to GEP.
        // GEP->setDebugLoc(GV);
        ReplaceUseInFunction(GV, GEP, &F, Builder);
        // Delete if no use in F.
        if (GEP->user_empty())
          GEP->eraseFromParent();
      } else {
        // Array of ConstantBuffer: every use is expected to be a GEP whose
        // leading indices select the resource; each GEP gets its own
        // createHandle/annotateHandle/subscript with the flattened index.
        for (auto U = GV->user_begin(); U != GV->user_end();) {
          User *user = *(U++);
          if (user->user_empty())
            continue;
          Instruction *I = dyn_cast<Instruction>(user);
          // Skip instructions that belong to some other function; constant
          // operators (I == nullptr) are handled in every function.
          if (I && I->getParent()->getParent() != &F)
            continue;
          IRBuilder<> *instBuilder = &Builder;
          std::unique_ptr<IRBuilder<>> B;
          if (I) {
            // Build right at the original instruction to preserve ordering.
            B = llvm::make_unique<IRBuilder<>>(I);
            instBuilder = B.get();
          }
          GEPOperator *GEPOp = cast<GEPOperator>(user);
          std::vector<Value *> idxList;
          DXASSERT(GEPOp->getNumIndices() >= 1 + cbIndexDepth,
                   "must indexing ConstantBuffer array");
          idxList.reserve(GEPOp->getNumIndices() - (cbIndexDepth - 1));
          gep_type_iterator GI = gep_type_begin(*GEPOp),
                            E = gep_type_end(*GEPOp);
          idxList.push_back(GI.getOperand());
          // change array index with 0 for struct index.
          idxList.push_back(zero);
          GI++;
          Value *arrayIdx = GI.getOperand();
          GI++;
          // Flatten multi-dimensional resource indices into a single linear
          // index: idx = idx * dimSize + nextIdx for each extra dimension.
          for (unsigned curIndex = 1; GI != E && curIndex < cbIndexDepth;
               ++GI, ++curIndex) {
            arrayIdx = instBuilder->CreateMul(
                arrayIdx, Builder.getInt32(GI->getArrayNumElements()));
            arrayIdx = instBuilder->CreateAdd(arrayIdx, GI.getOperand());
          }
          // Remaining indices address members inside the selected struct.
          for (; GI != E; ++GI) {
            idxList.push_back(GI.getOperand());
          }
          HandleArgs[HLOperandIndex::kCreateHandleIndexOpIdx - 1] = arrayIdx;
          CallInst *Handle =
              HLM.EmitHLOperationCall(*instBuilder,
                                      HLOpcodeGroup::HLCreateHandle, 0,
                                      HandleTy, HandleArgs, M);
          DxilResourceProperties RP = resource_helper::loadPropsFromResourceBase(&CB);
          Handle = CreateAnnotateHandle(HLM, Handle, RP, cbGV->getType()->getElementType(), *instBuilder);
          args[HLOperandIndex::kSubscriptObjectOpIdx] = Handle;
          args[HLOperandIndex::kSubscriptIndexOpIdx] = arrayIdx;
          Instruction *cbSubscript =
              cast<Instruction>(instBuilder->CreateCall(subscriptFunc, {args}));
          Instruction *NewGEP = cast<Instruction>(
              instBuilder->CreateInBoundsGEP(cbSubscript, idxList));
          ReplaceUseInFunction(GEPOp, NewGEP, &F, *instBuilder);
        }
      }
    }
    // Delete if no use in F.
    if (cbSubscript->user_empty()) {
      cbSubscript->eraseFromParent();
      Handle->eraseFromParent();
      OrigHandle->eraseFromParent();
    } else {
      // merge GEP use for cbSubscript.
      HLModule::MergeGepUse(cbSubscript);
    }
  }
  return true;
}
  2097. void ConstructCBufferAnnotation(
  2098. HLCBuffer &CB, DxilTypeSystem &dxilTypeSys,
  2099. std::unordered_map<Constant *, DxilFieldAnnotation> &AnnotationMap) {
  2100. Value *GV = CB.GetGlobalSymbol();
  2101. llvm::StructType *CBStructTy =
  2102. dyn_cast<llvm::StructType>(GV->getType()->getPointerElementType());
  2103. if (!CBStructTy) {
  2104. // For Array of ConstantBuffer.
  2105. llvm::ArrayType *CBArrayTy =
  2106. cast<llvm::ArrayType>(GV->getType()->getPointerElementType());
  2107. CBStructTy = cast<llvm::StructType>(CBArrayTy->getArrayElementType());
  2108. }
  2109. DxilStructAnnotation *CBAnnotation =
  2110. dxilTypeSys.AddStructAnnotation(CBStructTy);
  2111. CBAnnotation->SetCBufferSize(CB.GetSize());
  2112. // Set fieldAnnotation for each constant var.
  2113. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  2114. Constant *GV = C->GetGlobalSymbol();
  2115. DxilFieldAnnotation &fieldAnnotation =
  2116. CBAnnotation->GetFieldAnnotation(C->GetID());
  2117. fieldAnnotation = AnnotationMap[GV];
  2118. // This is after CBuffer allocation.
  2119. fieldAnnotation.SetCBufferOffset(C->GetLowerBound());
  2120. fieldAnnotation.SetFieldName(C->GetGlobalName());
  2121. }
  2122. }
  2123. void ConstructCBuffer(
  2124. HLModule &HLM, llvm::Type *CBufferType,
  2125. std::unordered_map<Constant *, DxilFieldAnnotation> &AnnotationMap) {
  2126. DxilTypeSystem &dxilTypeSys = HLM.GetTypeSystem();
  2127. llvm::Type *HandleTy = HLM.GetOP()->GetHandleType();
  2128. for (unsigned i = 0; i < HLM.GetCBuffers().size(); i++) {
  2129. HLCBuffer &CB = *static_cast<HLCBuffer *>(&(HLM.GetCBuffer(i)));
  2130. if (CB.GetConstants().size() == 0) {
  2131. // Create Fake variable for cbuffer which is empty.
  2132. llvm::GlobalVariable *pGV = new llvm::GlobalVariable(
  2133. *HLM.GetModule(), CBufferType, true,
  2134. llvm::GlobalValue::ExternalLinkage, nullptr, CB.GetGlobalName());
  2135. CB.SetGlobalSymbol(pGV);
  2136. } else {
  2137. bool bCreated = CreateCBufferVariable(CB, HLM, HandleTy);
  2138. if (bCreated)
  2139. ConstructCBufferAnnotation(CB, dxilTypeSys, AnnotationMap);
  2140. else {
  2141. // Create Fake variable for cbuffer which is unused.
  2142. llvm::GlobalVariable *pGV = new llvm::GlobalVariable(
  2143. *HLM.GetModule(), CBufferType, true,
  2144. llvm::GlobalValue::ExternalLinkage, nullptr, CB.GetGlobalName());
  2145. CB.SetGlobalSymbol(pGV);
  2146. }
  2147. }
  2148. // Clear the constants which useless now.
  2149. CB.GetConstants().clear();
  2150. }
  2151. }
  2152. } // namespace
  2153. namespace CGHLSLMSHelper {
  2154. // Align cbuffer offset in legacy mode (16 bytes per row).
  2155. unsigned AlignBufferOffsetInLegacy(unsigned offset, unsigned size,
  2156. unsigned scalarSizeInBytes,
  2157. bool bNeedNewRow) {
  2158. if (unsigned remainder = (offset & 0xf)) {
  2159. // Start from new row
  2160. if (remainder + size > 16 || bNeedNewRow) {
  2161. return offset + 16 - remainder;
  2162. }
  2163. // If not, naturally align data
  2164. return RoundToAlign(offset, scalarSizeInBytes);
  2165. }
  2166. return offset;
  2167. }
// Translate RayQuery constructor. From:
// %call = call %"RayQuery<flags>" @<constructor>(%"RayQuery<flags>" %ptr)
// To:
// i32 %handle = AllocateRayQuery(i32 <IntrinsicOp::IOP_AllocateRayQuery>, i32
// %flags) %gep = GEP %"RayQuery<flags>" %ptr, 0, 0 store i32* %gep, i32
// %handle ; and replace uses of %call with %ptr
void TranslateRayQueryConstructor(HLModule &HLM) {
  llvm::Module &M = *HLM.GetModule();
  // First collect the constructor functions, then rewrite them; collecting
  // up-front avoids mutating the function list while iterating it.
  SmallVector<Function *, 4> Constructors;
  for (auto &F : M.functions()) {
    // Match templated RayQuery constructor instantiation by prefix and
    // signature. It should be impossible to achieve the same signature from
    // HLSL.
    if (!F.getName().startswith("\01??0?$RayQuery@$"))
      continue;
    // Constructor returns a pointer to a RayQuery object...
    llvm::Type *Ty = F.getReturnType();
    if (!Ty->isPointerTy() ||
        !dxilutil::IsHLSLRayQueryType(Ty->getPointerElementType()))
      continue;
    // ...and takes exactly one argument of the same pointer type ('this').
    if (F.arg_size() != 1 || Ty != F.arg_begin()->getType())
      continue;
    Constructors.emplace_back(&F);
  }
  for (auto pConstructorFunc : Constructors) {
    llvm::IntegerType *i32Ty = llvm::Type::getInt32Ty(M.getContext());
    llvm::ConstantInt *i32Zero =
        llvm::ConstantInt::get(i32Ty, (uint64_t)0, false);
    // AllocateRayQuery HL intrinsic: i32 (i32 opcode, i32 rayFlags).
    llvm::FunctionType *funcTy =
        llvm::FunctionType::get(i32Ty, {i32Ty, i32Ty}, false);
    unsigned opcode = (unsigned)IntrinsicOp::IOP_AllocateRayQuery;
    llvm::ConstantInt *opVal = llvm::ConstantInt::get(i32Ty, opcode, false);
    Function *opFunc =
        GetOrCreateHLFunction(M, funcTy, HLOpcodeGroup::HLIntrinsic, opcode);
    // Rewrite every call site of this constructor instantiation.
    while (!pConstructorFunc->user_empty()) {
      Value *V = *pConstructorFunc->user_begin();
      llvm::CallInst *CI = cast<CallInst>(V); // Must be call
      llvm::Value *pThis = CI->getArgOperand(0);
      llvm::StructType *pRQType =
          cast<llvm::StructType>(pThis->getType()->getPointerElementType());
      // The RayQuery template flags are recovered from the type annotation.
      DxilStructAnnotation *SA =
          HLM.GetTypeSystem().GetStructAnnotation(pRQType);
      DXASSERT(SA, "otherwise, could not find type annoation for RayQuery "
                   "specialization");
      DXASSERT(SA->GetNumTemplateArgs() == 1 &&
                   SA->GetTemplateArgAnnotation(0).IsIntegral(),
               "otherwise, RayQuery has changed, or lacks template args");
      llvm::IRBuilder<> Builder(CI);
      llvm::Value *rayFlags =
          Builder.getInt32(SA->GetTemplateArgAnnotation(0).GetIntegral());
      // Allocate the ray query handle and store it into the object's first
      // (and only) field.
      llvm::Value *Call =
          Builder.CreateCall(opFunc, {opVal, rayFlags}, pThis->getName());
      llvm::Value *GEP = Builder.CreateInBoundsGEP(pThis, {i32Zero, i32Zero});
      Builder.CreateStore(Call, GEP);
      // The constructor "returned" its 'this' pointer; forward that.
      CI->replaceAllUsesWith(pThis);
      CI->eraseFromParent();
    }
    pConstructorFunc->eraseFromParent();
  }
}
  2227. } // namespace CGHLSLMSHelper
  2228. namespace {
// Tries to fold a generated static-initializer function into a constant
// initializer on the global it writes.  Succeeds only when the body consists
// solely of stores of non-pointer constants (plus the return) into a single
// array-typed global covering every element; in that case the global's
// initializer is set and true is returned.  Returns false otherwise.
bool BuildImmInit(Function *Ctor) {
  GlobalVariable *GV = nullptr;
  SmallVector<Constant *, 4> ImmList;
  bool allConst = true;
  for (inst_iterator I = inst_begin(Ctor), E = inst_end(Ctor); I != E; ++I) {
    if (StoreInst *SI = dyn_cast<StoreInst>(&(*I))) {
      Value *V = SI->getValueOperand();
      // Any non-constant (or pointer) stored value defeats constant folding.
      if (!isa<Constant>(V) || V->getType()->isPointerTy()) {
        allConst = false;
        break;
      }
      ImmList.emplace_back(cast<Constant>(V));
      Value *Ptr = SI->getPointerOperand();
      // Look through a GEP to discover the underlying global being written.
      if (GEPOperator *GepOp = dyn_cast<GEPOperator>(Ptr)) {
        Ptr = GepOp->getPointerOperand();
        if (GlobalVariable *pGV = dyn_cast<GlobalVariable>(Ptr)) {
          if (GV == nullptr)
            GV = pGV;
          else {
            // Every store must target the same global.
            DXASSERT(GV == pGV, "else pointer mismatch");
          }
        }
      }
    } else {
      // Only stores and the terminating return are foldable.
      if (!isa<ReturnInst>(*I)) {
        allConst = false;
        break;
      }
    }
  }
  if (!allConst)
    return false;
  if (!GV)
    return false;
  llvm::Type *Ty = GV->getType()->getElementType();
  llvm::ArrayType *AT = dyn_cast<llvm::ArrayType>(Ty);
  // TODO: support other types.
  if (!AT)
    return false;
  // The stores must cover the array exactly, one element each.
  // NOTE(review): this pairs stores with elements in instruction order;
  // assumes the initializer stores elements 0..N-1 in order - confirm.
  if (ImmList.size() != AT->getNumElements())
    return false;
  Constant *Init = llvm::ConstantArray::get(AT, ImmList);
  GV->setInitializer(Init);
  return true;
}
  2274. void CallCtorFunctionsAtInsertPt(llvm::Module &M,
  2275. llvm::SmallVector<llvm::Function *, 2> &Ctors,
  2276. Instruction *InsertPt) {
  2277. IRBuilder<> Builder(InsertPt);
  2278. for (Function *Ctor : Ctors) {
  2279. Builder.CreateCall(Ctor);
  2280. }
  2281. }
  2282. void CollectFunctionCallers(Function *F, DenseSet<Function *> &Callers) {
  2283. // worklist size max = call depth
  2284. SmallVector<Function *, 8> worklist;
  2285. worklist.push_back(F);
  2286. // add callers
  2287. while (worklist.size()) {
  2288. Function *F = worklist.pop_back_val();
  2289. for (User *U : F->users()) {
  2290. if (CallInst *CI = dyn_cast<CallInst>(U)) {
  2291. Function *Caller = CI->getParent()->getParent();
  2292. if (Callers.insert(Caller).second == true) {
  2293. // new caller
  2294. worklist.push_back(Caller);
  2295. }
  2296. }
  2297. }
  2298. }
  2299. }
  2300. DenseSet<Function *> CollectExternalFunctionCallers(Module &M) {
  2301. DenseSet<Function *> Callers;
  2302. for (Function &F : M) {
  2303. if (!F.isIntrinsic() && F.isDeclaration() &&
  2304. hlsl::GetHLOpcodeGroup(&F) == hlsl::HLOpcodeGroup::NotHL) {
  2305. CollectFunctionCallers(&F, Callers);
  2306. }
  2307. }
  2308. return Callers;
  2309. }
  2310. // If static initializers contain calls to external functions, this can
  2311. // introduce inter-module init function ordering dependencies. Some
  2312. // dependencies may even introduce contradictions. Creating and implementing an
  2313. // intuitive standard approach to solve this is likely quite difficult. Better
  2314. // to disallow the ambiguous and unlikely case for now.
  2315. bool IsValidCtorFunction(Function *F, DenseSet<Function *> &Callers) {
  2316. return Callers.count(F) == 0;
  2317. }
  2318. void ReportInitStaticGlobalWithExternalFunction(
  2319. clang::CodeGen ::CodeGenModule &CGM, StringRef name) {
  2320. clang::DiagnosticsEngine &Diags = CGM.getDiags();
  2321. unsigned DiagID = Diags.getCustomDiagID(
  2322. clang::DiagnosticsEngine::Error,
  2323. "Initializer for static global %0 makes disallowed call to external function.");
  2324. std::string escaped;
  2325. llvm::raw_string_ostream os(escaped);
  2326. size_t end = name.find_first_of('@');
  2327. if (end != StringRef::npos)
  2328. name = name.substr(0, end);
  2329. StringRef prefix = "\01??__E";
  2330. if (name.startswith(prefix))
  2331. name = name.substr(prefix.size());
  2332. dxilutil::PrintEscapedString(name, os);
  2333. Diags.Report(DiagID) << os.str();
  2334. }
  2335. } // namespace
  2336. namespace CGHLSLMSHelper {
// Scans the llvm.global_ctors-style array named |globalName| and, for each
// initializer call it contains, either folds the initializer into a constant
// (BuildImmInit) or records the callee in |Ctors| so it can be invoked from
// the entry function.  Initializers that call external functions are
// rejected with a diagnostic.  If every initializer folded to a constant,
// the ctor-list global itself is erased.
void CollectCtorFunctions(llvm::Module &M, llvm::StringRef globalName,
                          llvm::SmallVector<llvm::Function *, 2> &Ctors,
                          clang::CodeGen::CodeGenModule &CGM) {
  // add global call to entry func
  GlobalVariable *GV = M.getGlobalVariable(globalName);
  if (!GV)
    return;
  ConstantArray *CA = dyn_cast<ConstantArray>(GV->getInitializer());
  if (!CA)
    return;
  // Functions reaching external declarations; used to reject bad ctors.
  DenseSet<Function *> Callers = CollectExternalFunctionCallers(M);
  bool allEvaluated = true;
  for (User::op_iterator i = CA->op_begin(), e = CA->op_end(); i != e; ++i) {
    if (isa<ConstantAggregateZero>(*i))
      continue;
    // Each entry is a {priority, function, data} struct.
    ConstantStruct *CS = cast<ConstantStruct>(*i);
    if (isa<ConstantPointerNull>(CS->getOperand(1)))
      continue;
    // Must have a function or null ptr.
    if (!isa<Function>(CS->getOperand(1)))
      continue;
    Function *Ctor = cast<Function>(CS->getOperand(1));
    DXASSERT(Ctor->getReturnType()->isVoidTy() && Ctor->arg_size() == 0,
             "function type must be void (void)");
    // A ctor entry is expected to be calls to per-global init functions
    // followed by a return.
    for (inst_iterator I = inst_begin(Ctor), E = inst_end(Ctor); I != E; ++I) {
      if (CallInst *CI = dyn_cast<CallInst>(&(*I))) {
        // NOTE(review): getCalledFunction() is null for indirect calls;
        // BuildImmInit would then dereference null.  Presumably ctor entries
        // only ever make direct calls here - confirm.
        Function *F = CI->getCalledFunction();
        // Try to build an immediate initializer.
        // If that does not work, add a global call in the entry func.
        if (BuildImmInit(F) == false) {
          allEvaluated = false;
          if (IsValidCtorFunction(F, Callers)) {
            Ctors.emplace_back(F);
          } else {
            ReportInitStaticGlobalWithExternalFunction(CGM, F->getName());
          }
        }
      } else {
        DXASSERT(isa<ReturnInst>(&(*I)),
                 "else invalid Global constructor function");
      }
    }
  }
  // If all global constructors were replaced with initializers, just get rid
  // of the GV.
  if (allEvaluated) {
    GV->eraseFromParent();
  }
}
// Inserts calls to the collected global-ctor functions at the start of the
// entry function and, for hull shaders, at the start of the patch constant
// function as well.
void ProcessCtorFunctions(llvm::Module &M,
                          llvm::SmallVector<llvm::Function *, 2> &Ctors,
                          llvm::Function *Entry,
                          llvm::Function *PatchConstantFn) {
  if (PatchConstantFn) {
    // static globals are independent for entry function and patch constant
    // function. Updating a static global in the entry function will not
    // affect its value in the patch constant function. So just call the
    // ctors for the patch constant function too.
    CallCtorFunctionsAtInsertPt(
        M, Ctors, PatchConstantFn->getEntryBlock().getFirstInsertionPt());
    IRBuilder<> B(PatchConstantFn->getEntryBlock().getFirstInsertionPt());
    // For static globals which have a constant initial value, copy it at the
    // beginning of the patch constant function to avoid observing a value
    // already updated by the entry function.
    for (GlobalVariable &GV : M.globals()) {
      if (GV.isConstant())
        continue;
      if (!GV.hasInitializer())
        continue;
      // The ctor bookkeeping array itself is not a user global.
      if (GV.getName() == "llvm.global_ctors")
        continue;
      Value *V = GV.getInitializer();
      if (isa<UndefValue>(V))
        continue;
      B.CreateStore(V, &GV);
    }
  }
  CallCtorFunctionsAtInsertPt(M, Ctors,
                              Entry->getEntryBlock().getFirstInsertionPt());
}
// Finalizes cbuffers: allocates offsets for every constant, then creates the
// backing global variables and type annotations for each cbuffer.
void FinishCBuffer(HLModule &HLM, llvm::Type *CBufferType,
                   std::unordered_map<Constant *, DxilFieldAnnotation>
                       &constVarAnnotationMap) {
  // Allocate constant buffers.
  AllocateDxilConstantBuffers(HLM, constVarAnnotationMap);
  // TODO: create temp variable for constant which has store use.
  // Create Global variable and type annotation for each CBuffer.
  ConstructCBuffer(HLM, CBufferType, constVarAnnotationMap);
}
  2426. void AddRegBindingsForResourceInConstantBuffer(
  2427. HLModule &HLM,
  2428. llvm::DenseMap<llvm::Constant *,
  2429. llvm::SmallVector<std::pair<DXIL::ResourceClass, unsigned>,
  2430. 1>> &constantRegBindingMap) {
  2431. for (unsigned i = 0; i < HLM.GetCBuffers().size(); i++) {
  2432. HLCBuffer &CB = *static_cast<HLCBuffer *>(&(HLM.GetCBuffer(i)));
  2433. auto &Constants = CB.GetConstants();
  2434. for (unsigned j = 0; j < Constants.size(); j++) {
  2435. const std::unique_ptr<DxilResourceBase> &C = Constants[j];
  2436. Constant *CGV = C->GetGlobalSymbol();
  2437. auto &regBindings = constantRegBindingMap[CGV];
  2438. if (regBindings.empty())
  2439. continue;
  2440. unsigned Srv = UINT_MAX;
  2441. unsigned Uav = UINT_MAX;
  2442. unsigned Sampler = UINT_MAX;
  2443. for (auto it : regBindings) {
  2444. unsigned RegNum = it.second;
  2445. switch (it.first) {
  2446. case DXIL::ResourceClass::SRV:
  2447. Srv = RegNum;
  2448. break;
  2449. case DXIL::ResourceClass::UAV:
  2450. Uav = RegNum;
  2451. break;
  2452. case DXIL::ResourceClass::Sampler:
  2453. Sampler = RegNum;
  2454. break;
  2455. default:
  2456. DXASSERT(0, "invalid resource class");
  2457. break;
  2458. }
  2459. }
  2460. HLM.AddRegBinding(CB.GetID(), j, Srv, Uav, Sampler);
  2461. }
  2462. }
  2463. }
// extension codegen: applies extension-provided semantic defines and an
// optional #define-supplied root signature to the HLModule, routing any
// resulting messages through clang's diagnostics engine.
void ExtensionCodeGen(HLModule &HLM, clang::CodeGen::CodeGenModule &CGM) {
  // Add semantic defines for extensions if any are available.
  HLSLExtensionsCodegenHelper::SemanticDefineErrorList errors =
      CGM.getCodeGenOpts().HLSLExtensionsCodegen->WriteSemanticDefines(
          HLM.GetModule());
  clang::DiagnosticsEngine &Diags = CGM.getDiags();
  for (const HLSLExtensionsCodegenHelper::SemanticDefineError &error : errors) {
    // The helper may classify an issue as a warning instead of an error.
    clang::DiagnosticsEngine::Level level = clang::DiagnosticsEngine::Error;
    if (error.IsWarning())
      level = clang::DiagnosticsEngine::Warning;
    unsigned DiagID = Diags.getCustomDiagID(level, "%0");
    Diags.Report(clang::SourceLocation::getFromRawEncoding(error.Location()),
                 DiagID)
        << error.Message();
  }
  // Add root signature from a #define. Overrides root signature in function
  // attribute.
  {
    using Status = HLSLExtensionsCodegenHelper::CustomRootSignature::Status;
    HLSLExtensionsCodegenHelper::CustomRootSignature customRootSig;
    HLSLExtensionsCodegenHelper::CustomRootSignature::Status status =
        CGM.getCodeGenOpts().HLSLExtensionsCodegen->GetCustomRootSignature(
            &customRootSig);
    if (status == Status::FOUND) {
      DxilRootSignatureVersion rootSigVer;
      // set root signature version from the language option.
      if (CGM.getLangOpts().RootSigMinor == 0) {
        rootSigVer = hlsl::DxilRootSignatureVersion::Version_1_0;
      } else {
        DXASSERT(CGM.getLangOpts().RootSigMinor == 1,
                 "else CGMSHLSLRuntime Constructor needs to be updated");
        rootSigVer = hlsl::DxilRootSignatureVersion::Version_1_1;
      }
      RootSignatureHandle RootSigHandle;
      CompileRootSignature(
          customRootSig.RootSignature, Diags,
          clang::SourceLocation::getFromRawEncoding(
              customRootSig.EncodedSourceLocation),
          rootSigVer, DxilRootSignatureCompilationFlags::GlobalRootSignature,
          &RootSigHandle);
      if (!RootSigHandle.IsEmpty()) {
        // Serialize the compiled root signature and attach it to the module.
        RootSigHandle.EnsureSerializedAvailable();
        HLM.SetSerializedRootSignature(RootSigHandle.GetSerializedBytes(),
                                       RootSigHandle.GetSerializedSize());
      }
    }
  }
}
  2513. } // namespace CGHLSLMSHelper
  2514. namespace {
  2515. void ReportDisallowedTypeInExportParam(clang::CodeGen ::CodeGenModule &CGM,
  2516. StringRef name) {
  2517. clang::DiagnosticsEngine &Diags = CGM.getDiags();
  2518. unsigned DiagID =
  2519. Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
  2520. "Exported function %0 must not contain a "
  2521. "resource in parameter or return type.");
  2522. std::string escaped;
  2523. llvm::raw_string_ostream os(escaped);
  2524. dxilutil::PrintEscapedString(name, os);
  2525. Diags.Report(DiagID) << os.str();
  2526. }
  2527. } // namespace
  2528. namespace CGHLSLMSHelper {
// For each function that declared clip planes, snapshots the clip-plane
// values into dedicated SV_ClipPlane globals at the top of the function and
// repoints the function props at those globals, so later writes to the
// source variables do not change the recorded planes.
void FinishClipPlane(HLModule &HLM, std::vector<Function *> &clipPlaneFuncList,
                     std::unordered_map<Value *, DebugLoc> &debugInfoMap,
                     clang::CodeGen::CodeGenModule &CGM) {
  bool bDebugInfo = CGM.getCodeGenOpts().getDebugInfo() ==
                    clang::CodeGenOptions::FullDebugInfo;
  Module &M = *HLM.GetModule();
  for (Function *F : clipPlaneFuncList) {
    DxilFunctionProps &props = HLM.GetDxilFunctionProps(F);
    IRBuilder<> Builder(F->getEntryBlock().getFirstInsertionPt());
    for (unsigned i = 0; i < DXIL::kNumClipPlanes; i++) {
      Value *clipPlane = props.ShaderProps.VS.clipPlanes[i];
      if (!clipPlane)
        continue;
      if (bDebugInfo) {
        // Attribute the generated copy to the clip plane's source location.
        Builder.SetCurrentDebugLocation(debugInfoMap[clipPlane]);
      }
      llvm::Type *Ty = clipPlane->getType()->getPointerElementType();
      // Constant *zeroInit = ConstantFP::get(Ty, 0);
      GlobalVariable *GV = new llvm::GlobalVariable(
          M, Ty, /*IsConstant*/ false, // constant false to store.
          llvm::GlobalValue::ExternalLinkage,
          /*InitVal*/ nullptr, Twine("SV_ClipPlane") + Twine(i));
      // Copy the current value into the new global, then record the global
      // as the clip plane source in the shader props.
      Value *initVal = Builder.CreateLoad(clipPlane);
      Builder.CreateStore(initVal, GV);
      props.ShaderProps.VS.clipPlanes[i] = GV;
    }
  }
}
  2557. } // namespace CGHLSLMSHelper
  2558. namespace {
// Applies the -exports map for library targets: registers hull-shader patch
// constant functions as exports, matches export names against module
// functions (diagnosing collisions and unmatched exports), and renames or
// clones functions to satisfy multiple export names, duplicating
// DxilFunctionProps for cloned entries.
void LowerExportFunctions(HLModule &HLM, clang::CodeGen::CodeGenModule &CGM,
                          dxilutil::ExportMap &exportMap,
                          StringMap<EntryFunctionInfo> &entryFunctionMap) {
  bool bIsLib = HLM.GetShaderModel()->IsLib();
  Module &M = *HLM.GetModule();
  if (bIsLib && !exportMap.empty()) {
    // Hull shader entries implicitly export their patch constant function.
    for (auto &it : entryFunctionMap) {
      if (HLM.HasDxilFunctionProps(it.second.Func)) {
        const DxilFunctionProps &props =
            HLM.GetDxilFunctionProps(it.second.Func);
        if (props.IsHS())
          exportMap.RegisterExportedFunction(
              props.ShaderProps.HS.patchConstantFunc);
      }
    }
  }
  if (bIsLib && !exportMap.empty()) {
    exportMap.BeginProcessing();
    // Offer every defined, non-intrinsic, non-HL function to the export map.
    for (Function &f : M.functions()) {
      if (f.isDeclaration() || f.isIntrinsic() ||
          GetHLOpcodeGroup(&f) != HLOpcodeGroup::NotHL)
        continue;
      exportMap.ProcessFunction(&f, true);
    }
    // TODO: add subobject export names here.
    if (!exportMap.EndProcessing()) {
      // Processing failed: report duplicate and unmatched export names.
      for (auto &name : exportMap.GetNameCollisions()) {
        clang::DiagnosticsEngine &Diags = CGM.getDiags();
        unsigned DiagID = Diags.getCustomDiagID(
            clang::DiagnosticsEngine::Error,
            "Export name collides with another export: %0");
        std::string escaped;
        llvm::raw_string_ostream os(escaped);
        dxilutil::PrintEscapedString(name, os);
        Diags.Report(DiagID) << os.str();
      }
      for (auto &name : exportMap.GetUnusedExports()) {
        clang::DiagnosticsEngine &Diags = CGM.getDiags();
        unsigned DiagID =
            Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
                                  "Could not find target for export: %0");
        std::string escaped;
        llvm::raw_string_ostream os(escaped);
        dxilutil::PrintEscapedString(name, os);
        Diags.Report(DiagID) << os.str();
      }
    }
  }
  for (auto &it : exportMap.GetFunctionRenames()) {
    Function *F = it.first;
    auto &renames = it.second;
    if (renames.empty())
      continue;
    // Rename the original, if necessary, then clone the rest
    if (renames.find(F->getName()) == renames.end())
      F->setName(*renames.begin());
    for (auto &itName : renames) {
      if (F->getName() != itName) {
        Function *pClone = CloneFunction(F, itName, &M, HLM.GetTypeSystem(),
                                         HLM.GetTypeSystem());
        // add DxilFunctionProps if entry
        if (HLM.HasDxilFunctionProps(F)) {
          DxilFunctionProps &props = HLM.GetDxilFunctionProps(F);
          auto newProps = llvm::make_unique<DxilFunctionProps>(props);
          HLM.AddDxilFunctionProps(pClone, newProps);
        }
      }
    }
  }
}
  2629. void CheckResourceParameters(HLModule &HLM,
  2630. clang::CodeGen::CodeGenModule &CGM) {
  2631. Module &M = *HLM.GetModule();
  2632. for (Function &f : M.functions()) {
  2633. // Skip llvm intrinsics, non-external linkage, entry/patch constant func,
  2634. // and HL intrinsics
  2635. if (!f.isIntrinsic() &&
  2636. f.getLinkage() == GlobalValue::LinkageTypes::ExternalLinkage &&
  2637. !HLM.HasDxilFunctionProps(&f) && !HLM.IsPatchConstantShader(&f) &&
  2638. GetHLOpcodeGroup(&f) == HLOpcodeGroup::NotHL) {
  2639. // Verify no resources in param/return types
  2640. if (dxilutil::ContainsHLSLObjectType(f.getReturnType())) {
  2641. ReportDisallowedTypeInExportParam(CGM, f.getName());
  2642. continue;
  2643. }
  2644. for (auto &Arg : f.args()) {
  2645. if (dxilutil::ContainsHLSLObjectType(Arg.getType())) {
  2646. ReportDisallowedTypeInExportParam(CGM, f.getName());
  2647. break;
  2648. }
  2649. }
  2650. }
  2651. }
  2652. }
  2653. } // namespace
  2654. namespace CGHLSLMSHelper {
// Fixes up function linkage for the target profile: for non-library targets,
// pins the entry/patch-constant functions external and makes everything else
// internal (erroring on true external declarations); applies export
// lowering; optionally internalizes non-shader functions; and ensures hull
// shaders' patch constant functions stay external.  Also marks used,
// defined, non-noinline functions AlwaysInline.
void UpdateLinkage(HLModule &HLM, clang::CodeGen::CodeGenModule &CGM,
                   dxilutil::ExportMap &exportMap,
                   StringMap<EntryFunctionInfo> &entryFunctionMap,
                   StringMap<PatchConstantInfo> &patchConstantFunctionMap) {
  bool bIsLib = HLM.GetShaderModel()->IsLib();
  Module &M = *HLM.GetModule();
  // Pin entry point and constant buffers, mark everything else internal.
  for (Function &f : M.functions()) {
    if (!bIsLib) {
      if (&f == HLM.GetEntryFunction() ||
          IsPatchConstantFunction(&f, patchConstantFunctionMap) ||
          f.isDeclaration()) {
        // A non-intrinsic, non-HL declaration is an unresolved external
        // function, which a non-library profile cannot link.
        if (f.isDeclaration() && !f.isIntrinsic() &&
            GetHLOpcodeGroup(&f) == HLOpcodeGroup::NotHL) {
          clang::DiagnosticsEngine &Diags = CGM.getDiags();
          unsigned DiagID = Diags.getCustomDiagID(
              clang::DiagnosticsEngine::Error,
              "External function used in non-library profile: %0");
          std::string escaped;
          llvm::raw_string_ostream os(escaped);
          dxilutil::PrintEscapedString(f.getName(), os);
          Diags.Report(DiagID) << os.str();
          return;
        }
        f.setLinkage(GlobalValue::LinkageTypes::ExternalLinkage);
      } else {
        f.setLinkage(GlobalValue::LinkageTypes::InternalLinkage);
      }
    }
    // Skip no inline functions.
    if (f.hasFnAttribute(llvm::Attribute::NoInline))
      continue;
    // Always inline for used functions.
    if (!f.user_empty() && !f.isDeclaration())
      f.addFnAttr(llvm::Attribute::AlwaysInline);
  }
  LowerExportFunctions(HLM, CGM, exportMap, entryFunctionMap);
  if (CGM.getCodeGenOpts().ExportShadersOnly) {
    for (Function &f : M.functions()) {
      // Skip declarations, intrinsics, shaders, and non-external linkage
      if (f.isDeclaration() || f.isIntrinsic() ||
          GetHLOpcodeGroup(&f) != HLOpcodeGroup::NotHL ||
          HLM.HasDxilFunctionProps(&f) || HLM.IsPatchConstantShader(&f) ||
          f.getLinkage() != GlobalValue::LinkageTypes::ExternalLinkage)
        continue;
      // Mark non-shader user functions as InternalLinkage
      f.setLinkage(GlobalValue::LinkageTypes::InternalLinkage);
    }
  }
  // Now iterate hull shaders and make sure their corresponding patch constant
  // functions are marked ExternalLinkage:
  for (Function &f : M.functions()) {
    if (f.isDeclaration() || f.isIntrinsic() ||
        GetHLOpcodeGroup(&f) != HLOpcodeGroup::NotHL ||
        f.getLinkage() != GlobalValue::LinkageTypes::ExternalLinkage ||
        !HLM.HasDxilFunctionProps(&f))
      continue;
    DxilFunctionProps &props = HLM.GetDxilFunctionProps(&f);
    if (!props.IsHS())
      continue;
    Function *PCFunc = props.ShaderProps.HS.patchConstantFunc;
    if (PCFunc->getLinkage() != GlobalValue::LinkageTypes::ExternalLinkage)
      PCFunc->setLinkage(GlobalValue::LinkageTypes::ExternalLinkage);
  }
  // Disallow resource arguments in (non-entry) function exports
  // unless offline linking target.
  if (bIsLib &&
      HLM.GetShaderModel()->GetMinor() != ShaderModel::kOfflineMinor) {
    CheckResourceParameters(HLM, CGM);
  }
}
// Finish per-entry processing after codegen:
// - Non-library: bind the single entry function on the module (error if it
//   could not be determined), optionally create writable shadow copies of
//   const globals for /Gec back-compat, and, for hull shaders, bind the
//   patch-constant function.
// - Library: clone each non-raytracing entry under its export name and wire
//   up any [patchconstantfunc] attribute found on it.
void FinishEntries(
    HLModule &HLM, const EntryFunctionInfo &Entry,
    clang::CodeGen::CodeGenModule &CGM,
    StringMap<EntryFunctionInfo> &entryFunctionMap,
    std::unordered_map<Function *, const clang::HLSLPatchConstantFuncAttr *>
        &HSEntryPatchConstantFuncAttr,
    StringMap<PatchConstantInfo> &patchConstantFunctionMap,
    std::unordered_map<Function *, std::unique_ptr<DxilFunctionProps>>
        &patchConstantFunctionPropsMap) {
  bool bIsLib = HLM.GetShaderModel()->IsLib();
  // Libraries don't have a single entry.
  if (!bIsLib) {
    SetEntryFunction(HLM, Entry.Func, CGM);
    // If at this point we haven't determined the entry function it's an error.
    if (HLM.GetEntryFunction() == nullptr) {
      assert(CGM.getDiags().hasErrorOccurred() &&
             "else SetEntryFunction should have reported this condition");
      return;
    }
    // In back-compat mode (with /Gec flag) create a static global for each
    // const global to allow writing to it.
    // TODO: Verify the behavior of static globals in hull shader
    if (CGM.getLangOpts().EnableDX9CompatMode &&
        CGM.getLangOpts().HLSLVersion <= 2016)
      CreateWriteEnabledStaticGlobals(HLM.GetModule(), HLM.GetEntryFunction());
    if (HLM.GetShaderModel()->IsHS()) {
      SetPatchConstantFunction(Entry, HSEntryPatchConstantFuncAttr,
                               patchConstantFunctionMap,
                               patchConstantFunctionPropsMap, HLM, CGM);
    }
  } else {
    for (auto &it : entryFunctionMap) {
      // skip clone if RT entry
      if (HLM.GetDxilFunctionProps(it.second.Func).IsRay())
        continue;
      // TODO: change flattened function names to dx.entry.<name>:
      // std::string entryName = (Twine(dxilutil::EntryPrefix) +
      // it.getKey()).str();
      CloneShaderEntry(it.second.Func, it.getKey(), HLM);
      auto AttrIter = HSEntryPatchConstantFuncAttr.find(it.second.Func);
      if (AttrIter != HSEntryPatchConstantFuncAttr.end()) {
        SetPatchConstantFunctionWithAttr(
            it.second, AttrIter->second, patchConstantFunctionMap,
            patchConstantFunctionPropsMap, HLM, CGM);
      }
    }
  }
}
  2774. } // namespace CGHLSLMSHelper
  2775. namespace CGHLSLMSHelper {
// Lower HL intrinsic calls into their final form. The three steps below are
// order-sensitive; see the inline comments for why.
void FinishIntrinsics(
    HLModule &HLM, std::vector<std::pair<Function *, unsigned>> &intrinsicMap,
    DxilObjectProperties &objectProperties) {
  // Lower getResourceHeap before AddOpcodeParamForIntrinsics to skip automatic
  // lower for getResourceFromHeap.
  LowerGetResourceFromHeap(HLM, intrinsicMap);
  // Lower bitcast use of CBV into cbSubscript.
  LowerDynamicCBVUseToHandle(HLM, objectProperties);
  // translate opcode into parameter for intrinsic functions
  // Do this before CloneShaderEntry and TranslateRayQueryConstructor to avoid
  // update valToResPropertiesMap for cloned inst.
  AddOpcodeParamForIntrinsics(HLM, intrinsicMap, objectProperties);
}
  2789. // Add the dx.break temporary intrinsic and create Call Instructions
  2790. // to it for each branch that requires the artificial conditional.
  2791. void AddDxBreak(Module &M,
  2792. const SmallVector<llvm::BranchInst *, 16> &DxBreaks) {
  2793. if (DxBreaks.empty())
  2794. return;
  2795. // Collect functions that make use of any wave operations
  2796. // Only they will need the dx.break condition added
  2797. SmallPtrSet<Function *, 16> WaveUsers;
  2798. for (Function &F : M.functions()) {
  2799. HLOpcodeGroup opgroup = hlsl::GetHLOpcodeGroup(&F);
  2800. if (F.isDeclaration() && IsHLWaveSensitive(&F) &&
  2801. (opgroup == HLOpcodeGroup::HLIntrinsic ||
  2802. opgroup == HLOpcodeGroup::HLExtIntrinsic)) {
  2803. for (User *U : F.users()) {
  2804. CallInst *CI = cast<CallInst>(U);
  2805. WaveUsers.insert(CI->getParent()->getParent());
  2806. }
  2807. }
  2808. }
  2809. // If there are no wave users, not even the function declaration is needed
  2810. if (WaveUsers.empty())
  2811. return;
  2812. // Create the dx.break function
  2813. FunctionType *FT =
  2814. llvm::FunctionType::get(llvm::Type::getInt1Ty(M.getContext()), false);
  2815. Function *func =
  2816. cast<llvm::Function>(M.getOrInsertFunction(DXIL::kDxBreakFuncName, FT));
  2817. func->addFnAttr(Attribute::AttrKind::NoUnwind);
  2818. // For all break branches recorded previously, if the function they are in
  2819. // makes any use of a wave op, it may need to be artificially conditional.
  2820. // Make it so now. The CleanupDxBreak pass will remove those that aren't
  2821. // needed when more is known.
  2822. for (llvm::BranchInst *BI : DxBreaks) {
  2823. if (WaveUsers.count(BI->getParent()->getParent())) {
  2824. CallInst *Call = CallInst::Create(FT, func, ArrayRef<Value *>(), "", BI);
  2825. BI->setCondition(Call);
  2826. if (!BI->getMetadata(DXIL::kDxBreakMDName)) {
  2827. BI->setMetadata(DXIL::kDxBreakMDName,
  2828. llvm::MDNode::get(BI->getContext(), {}));
  2829. }
  2830. }
  2831. }
  2832. }
  2833. } // namespace CGHLSLMSHelper
  2834. namespace CGHLSLMSHelper {
  2835. ScopeInfo::ScopeInfo(Function *F) : maxRetLevel(0), bAllReturnsInIf(true) {
  2836. Scope FuncScope;
  2837. FuncScope.kind = Scope::ScopeKind::FunctionScope;
  2838. FuncScope.EndScopeBB = nullptr;
  2839. FuncScope.bWholeScopeReturned = false;
  2840. // Make it 0 to avoid check when get parent.
  2841. // All loop on scopes should check kind != FunctionScope.
  2842. FuncScope.parentScopeIndex = 0;
  2843. scopes.emplace_back(FuncScope);
  2844. scopeStack.emplace_back(0);
  2845. }
// When all returns are inside `if` statements that are not nested, the flow
// is still structurized even when there is more than one return.
  2848. bool ScopeInfo::CanSkipStructurize() {
  2849. return bAllReturnsInIf && maxRetLevel < 2;
  2850. }
  2851. void ScopeInfo::AddScope(Scope::ScopeKind k, BasicBlock *endScopeBB) {
  2852. Scope Scope;
  2853. Scope.kind = k;
  2854. Scope.bWholeScopeReturned = false;
  2855. Scope.EndScopeBB = endScopeBB;
  2856. Scope.parentScopeIndex = scopeStack.back();
  2857. scopeStack.emplace_back(scopes.size());
  2858. scopes.emplace_back(Scope);
  2859. }
// Open an if-scope; endIfBB is the merge block following the if.
void ScopeInfo::AddIf(BasicBlock *endIfBB) {
  AddScope(Scope::ScopeKind::IfScope, endIfBB);
}
// Open a switch-scope; endSwitch is the block following the switch.
void ScopeInfo::AddSwitch(BasicBlock *endSwitch) {
  AddScope(Scope::ScopeKind::SwitchScope, endSwitch);
}
// Open a loop-scope; endLoop is the block after the loop. The continue
// target is also recorded so structurized returns in wave-enabled stages
// can branch to it (see StructurizeMultiRetFunction).
void ScopeInfo::AddLoop(BasicBlock *loopContinue, BasicBlock *endLoop) {
  AddScope(Scope::ScopeKind::LoopScope, endLoop);
  scopes.back().loopContinueBB = loopContinue;
}
// Record a return statement (in bbWithRet) as a pseudo-scope.
// The return is parented to its innermost enclosing loop/switch when one
// exists (a break suffices to leave it); otherwise to the current top of
// the scope stack.
void ScopeInfo::AddRet(BasicBlock *bbWithRet) {
  Scope RetScope;
  RetScope.kind = Scope::ScopeKind::ReturnScope;
  RetScope.EndScopeBB = bbWithRet;
  RetScope.parentScopeIndex = scopeStack.back();
  // - 1 for function scope which is at scopeStack[0].
  unsigned retLevel = scopeStack.size() - 1;
  // save max nested level for ret.
  maxRetLevel = std::max<unsigned>(maxRetLevel, retLevel);
  bool bGotLoopOrSwitch = false;
  // Walk active scopes innermost-first looking for a loop or switch.
  for (auto it = scopeStack.rbegin(); it != scopeStack.rend(); it++) {
    unsigned idx = *it;
    Scope &S = scopes[idx];
    switch (S.kind) {
    default:
      break;
    case Scope::ScopeKind::LoopScope:
    case Scope::ScopeKind::SwitchScope:
      bGotLoopOrSwitch = true;
      // For return inside loop and switch, can just break.
      RetScope.parentScopeIndex = idx;
      break;
    }
    if (bGotLoopOrSwitch)
      break;
  }
  // A return inside a loop/switch disqualifies the fast path checked by
  // CanSkipStructurize.
  bAllReturnsInIf &= !bGotLoopOrSwitch;
  // return finishes the current scope.
  RetScope.bWholeScopeReturned = true;
  // save retScope to rets.
  rets.emplace_back(scopes.size());
  scopes.emplace_back(RetScope);
  // Don't need to put retScope on the stack since it cannot nest other scopes.
}
  2904. void ScopeInfo::EndScope(bool bScopeFinishedWithRet) {
  2905. unsigned idx = scopeStack.pop_back_val();
  2906. Scope &Scope = GetScope(idx);
  2907. // If whole stmt is finished and end scope bb has not used(nothing branch to
  2908. // it). Then the whole scope is returned.
  2909. Scope.bWholeScopeReturned =
  2910. bScopeFinishedWithRet && Scope.EndScopeBB->user_empty();
  2911. }
// Return the scope record stored at index i in the scope list.
Scope &ScopeInfo::GetScope(unsigned i) { return scopes[i]; }
  2913. void ScopeInfo::LegalizeWholeReturnedScope() {
  2914. // legalize scopes which whole scope returned.
  2915. // When whole scope is returned, the endScopeBB will be deleted in codeGen.
  2916. // Here update it to parent scope's endScope.
  2917. // Since the scopes are in order, so it will automatic update to the final
  2918. // target. A->B->C will just get A->C.
  2919. for (auto &S : scopes) {
  2920. if (S.bWholeScopeReturned && S.kind != Scope::ScopeKind::ReturnScope) {
  2921. S.EndScopeBB = scopes[S.parentScopeIndex].EndScopeBB;
  2922. }
  2923. }
  2924. }
  2925. } // namespace CGHLSLMSHelper
  2926. namespace {
  2927. void updateEndScope(
  2928. ScopeInfo &ScopeInfo,
  2929. DenseMap<BasicBlock *, SmallVector<unsigned, 2>> &EndBBToScopeIndexMap,
  2930. BasicBlock *oldEndScope, BasicBlock *newEndScope) {
  2931. auto it = EndBBToScopeIndexMap.find(oldEndScope);
  2932. DXASSERT(it != EndBBToScopeIndexMap.end(),
  2933. "fail to find endScopeBB in EndBBToScopeIndexMap");
  2934. SmallVector<unsigned, 2> &scopeList = it->second;
  2935. // Don't need to update when not share endBB with other scope.
  2936. if (scopeList.size() < 2)
  2937. return;
  2938. for (unsigned i : scopeList) {
  2939. Scope &S = ScopeInfo.GetScope(i);
  2940. // Don't update return endBB, because that is the Block has return branch.
  2941. if (S.kind != Scope::ScopeKind::ReturnScope)
  2942. S.EndScopeBB = newEndScope;
  2943. }
  2944. EndBBToScopeIndexMap[newEndScope] = scopeList;
  2945. }
// Init ret value with undef to make sure it will not live through loops inside
// callers.
// Because returns are structurized, the flow is controlled by bIsReturned. The
// semantics are the same as multiple returns, but without knowledge of
// bIsReturned, some paths of the structurized flow would leave the ret value
// uninitialized.
// When the function is called inside a loop, the ret value would live across
// the loop after inlining.
  2954. void InitRetValue(BasicBlock *exitBB) {
  2955. Value *RetValPtr = nullptr;
  2956. if (ReturnInst *RI = dyn_cast<ReturnInst>(exitBB->getTerminator())) {
  2957. if (Value *RetV = RI->getReturnValue()) {
  2958. if (LoadInst *LI = dyn_cast<LoadInst>(RetV)) {
  2959. RetValPtr = LI->getPointerOperand();
  2960. }
  2961. }
  2962. }
  2963. if (!RetValPtr)
  2964. return;
  2965. if (AllocaInst *RetVAlloc = dyn_cast<AllocaInst>(RetValPtr)) {
  2966. IRBuilder<> B(RetVAlloc->getNextNode());
  2967. Type *Ty = RetVAlloc->getAllocatedType();
  2968. Value *Init = UndefValue::get(Ty);
  2969. if (Ty->isAggregateType()) {
  2970. // TODO: support aggreagate type and out parameters.
  2971. // Skip it here will cause undef on phi which the incoming path should
  2972. // never hit.
  2973. } else {
  2974. B.CreateStore(Init, RetVAlloc);
  2975. }
  2976. }
  2977. }
  2978. // For functions has multiple returns like
  2979. // float foo(float a, float b, float c) {
  2980. // float r = c;
  2981. // if (a > 0) {
  2982. // if (b > 0) {
  2983. // return -1;
  2984. // }
  2985. // ***
  2986. // }
  2987. // ...
  2988. // return r;
  2989. // }
  2990. // transform into
  2991. // float foo(float a, float b, float c) {
  2992. // bool bRet = false;
  2993. // float retV;
  2994. // float r = c;
  2995. // if (a > 0) {
  2996. // if (b > 0) {
  2997. // bRet = true;
  2998. // retV = -1;
  2999. // }
  3000. // if (!bRet) {
  3001. // ***
  3002. // }
  3003. // }
  3004. // if (!bRet) {
  3005. // ...
  3006. // retV = r;
  3007. // }
// return retV;
  3009. // }
// Rewrite a multi-return function into single-exit form (see the worked
// example in the comment above): a stack slot `bReturned` tracks whether a
// return executed, each return is turned into a branch toward its parent
// scope's end, and code after a potential return is guarded on !bReturned.
// In wave-enabled stages, breaks out of loops are recorded in DxBreaks so
// AddDxBreak can make them artificially conditional later.
void StructurizeMultiRetFunction(Function *F, ScopeInfo &ScopeInfo,
                                 bool bWaveEnabledStage,
                                 SmallVector<BranchInst *, 16> &DxBreaks) {
  // Nothing to do when all returns are already structured.
  if (ScopeInfo.CanSkipStructurize())
    return;
  // Get bbWithRets.
  auto &rets = ScopeInfo.GetRetScopes();
  IRBuilder<> B(F->getEntryBlock().begin());
  Scope &FunctionScope = ScopeInfo.GetScope(0);
  Type *boolTy = Type::getInt1Ty(F->getContext());
  Constant *cTrue = ConstantInt::get(boolTy, 1);
  Constant *cFalse = ConstantInt::get(boolTy, 0);
  // bool bIsReturned = false;
  AllocaInst *bIsReturned = B.CreateAlloca(boolTy, nullptr, "bReturned");
  B.CreateStore(cFalse, bIsReturned);
  // The function's single exit is the successor of the first return's block.
  Scope &RetScope = ScopeInfo.GetScope(rets[0]);
  BasicBlock *exitBB = RetScope.EndScopeBB->getTerminator()->getSuccessor(0);
  FunctionScope.EndScopeBB = exitBB;
  // Find alloca for return val and init it to avoid undef after guard code
  // with bIsReturned.
  InitRetValue(exitBB);
  ScopeInfo.LegalizeWholeReturnedScope();
  // Map from endScopeBB to scope index.
  // When 2 scopes share same endScopeBB, need to update endScopeBB after
  // structurize.
  DenseMap<BasicBlock *, SmallVector<unsigned, 2>> EndBBToScopeIndexMap;
  auto &scopes = ScopeInfo.GetScopes();
  for (unsigned i = 0; i < scopes.size(); i++) {
    Scope &S = scopes[i];
    EndBBToScopeIndexMap[S.EndScopeBB].emplace_back(i);
  }
  // Scopes already guarded; a scope shared by several returns is processed
  // only once.
  DenseSet<unsigned> guardedSet;
  // For each return, walk outward from the return scope to the function
  // scope, guarding each level as needed.
  for (auto it = rets.begin(); it != rets.end(); it++) {
    unsigned scopeIndex = *it;
    Scope *pCurScope = &ScopeInfo.GetScope(scopeIndex);
    Scope *pRetParentScope = &ScopeInfo.GetScope(pCurScope->parentScopeIndex);
    // skip ret not in nested control flow.
    if (pRetParentScope->kind == Scope::ScopeKind::FunctionScope)
      continue;
    do {
      BasicBlock *BB = pCurScope->EndScopeBB;
      // exit when scope is processed.
      if (guardedSet.count(scopeIndex))
        break;
      guardedSet.insert(scopeIndex);
      Scope *pParentScope = &ScopeInfo.GetScope(pCurScope->parentScopeIndex);
      BasicBlock *EndBB = pParentScope->EndScopeBB;
      // When whole scope returned, just branch to endScope of parent.
      if (pCurScope->bWholeScopeReturned) {
        // For ret, just branch to endScope of parent.
        if (pCurScope->kind == Scope::ScopeKind::ReturnScope) {
          BasicBlock *retBB = pCurScope->EndScopeBB;
          TerminatorInst *retBr = retBB->getTerminator();
          IRBuilder<> B(retBr);
          // Set bReturned to true.
          B.CreateStore(cTrue, bIsReturned);
          if (bWaveEnabledStage &&
              pParentScope->kind == Scope::ScopeKind::LoopScope) {
            // Emit a (for now always-true) conditional branch so AddDxBreak
            // can later hook it to dx.break().
            BranchInst *BI =
                B.CreateCondBr(cTrue, EndBB, pParentScope->loopContinueBB);
            DxBreaks.emplace_back(BI);
            retBr->eraseFromParent();
          } else {
            // Update branch target.
            retBr->setSuccessor(0, EndBB);
          }
        }
        // For other scope, do nothing. Since whole scope is returned.
        // Just flow naturally to parent scope.
      } else {
        // When only part scope returned.
        // Use bIsReturned to guard the part which has not returned.
        switch (pParentScope->kind) {
        case Scope::ScopeKind::ReturnScope:
          DXASSERT(0, "return scope must get whole scope returned.");
          break;
        case Scope::ScopeKind::FunctionScope:
        case Scope::ScopeKind::IfScope: {
          // inside if.
          // if (!bReturned) {
          //   rest of if or else.
          // }
          BasicBlock *CmpBB =
              BasicBlock::Create(BB->getContext(), "bReturned.cmp.false", F, BB);
          // Make BB preds go to cmpBB.
          BB->replaceAllUsesWith(CmpBB);
          // Update endscopeBB to CmpBB for scopes which have BB as endscope.
          updateEndScope(ScopeInfo, EndBBToScopeIndexMap, BB, CmpBB);
          IRBuilder<> B(CmpBB);
          Value *isRetured = B.CreateLoad(bIsReturned, "bReturned.load");
          Value *notReturned =
              B.CreateICmpNE(isRetured, cFalse, "bReturned.not");
          B.CreateCondBr(notReturned, EndBB, BB);
        } break;
        default: {
          // inside switch/loop
          // if (bReturned) {
          //   br endOfScope.
          // }
          BasicBlock *CmpBB =
              BasicBlock::Create(BB->getContext(), "bReturned.cmp.true", F, BB);
          BasicBlock *BreakBB =
              BasicBlock::Create(BB->getContext(), "bReturned.break", F, BB);
          BB->replaceAllUsesWith(CmpBB);
          // Update endscopeBB to CmpBB for scopes which have BB as endscope.
          updateEndScope(ScopeInfo, EndBBToScopeIndexMap, BB, CmpBB);
          IRBuilder<> B(CmpBB);
          Value *isReturned = B.CreateLoad(bIsReturned, "bReturned.load");
          isReturned = B.CreateICmpEQ(isReturned, cTrue, "bReturned.true");
          B.CreateCondBr(isReturned, BreakBB, BB);
          B.SetInsertPoint(BreakBB);
          if (bWaveEnabledStage &&
              pParentScope->kind == Scope::ScopeKind::LoopScope) {
            BranchInst *BI =
                B.CreateCondBr(cTrue, EndBB, pParentScope->loopContinueBB);
            DxBreaks.emplace_back(BI);
          } else {
            B.CreateBr(EndBB);
          }
        } break;
        }
      }
      scopeIndex = pCurScope->parentScopeIndex;
      pCurScope = &ScopeInfo.GetScope(scopeIndex);
      // done when reach function scope.
    } while (pCurScope->kind != Scope::ScopeKind::FunctionScope);
  }
}
  3138. } // namespace
  3139. namespace CGHLSLMSHelper {
  3140. void StructurizeMultiRet(Module &M, clang::CodeGen::CodeGenModule &CGM,
  3141. DenseMap<Function *, ScopeInfo> &ScopeMap,
  3142. bool bWaveEnabledStage,
  3143. SmallVector<BranchInst *, 16> &DxBreaks) {
  3144. if (CGM.getCodeGenOpts().HLSLExtensionsCodegen) {
  3145. if (!CGM.getCodeGenOpts().HLSLExtensionsCodegen->IsOptionEnabled(
  3146. "structurize-returns"))
  3147. return;
  3148. } else {
  3149. if (!CGM.getCodeGenOpts().HLSLOptimizationToggles.count(
  3150. "structurize-returns") ||
  3151. !CGM.getCodeGenOpts()
  3152. .HLSLOptimizationToggles.find("structurize-returns")
  3153. ->second)
  3154. return;
  3155. }
  3156. for (Function &F : M) {
  3157. if (F.isDeclaration())
  3158. continue;
  3159. auto it = ScopeMap.find(&F);
  3160. if (it == ScopeMap.end())
  3161. continue;
  3162. StructurizeMultiRetFunction(&F, it->second, bWaveEnabledStage, DxBreaks);
  3163. }
  3164. }
  3165. bool DxilObjectProperties::AddResource(llvm::Value *V, const hlsl::DxilResourceProperties &RP) {
  3166. if (RP.isValid()) {
  3167. DXASSERT(!GetResource(V).isValid() || GetResource(V) == RP, "otherwise, property conflict");
  3168. resMap[V] = RP;
  3169. return true;
  3170. }
  3171. return false;
  3172. }
  3173. bool DxilObjectProperties::IsResource(llvm::Value *V) {
  3174. return resMap.count(V) != 0;
  3175. }
  3176. hlsl::DxilResourceProperties DxilObjectProperties::GetResource(llvm::Value *V) {
  3177. auto it = resMap.find(V);
  3178. if (it != resMap.end())
  3179. return it->second;
  3180. return DxilResourceProperties();
  3181. }
  3182. } // namespace CGHLSLMSHelper